Mirror of https://github.com/scroll-tech/scroll.git (synced 2026-01-11 23:18:07 -05:00)

Compare commits: v4.5.42 ... feat/valid — 21 commits
| Author | SHA1 | Date |
|---|---|---|
| | 07a9f0e106 | |
| | ad13b56d7c | |
| | 722cc5ee76 | |
| | ab8df8e4b5 | |
| | 755ed6074e | |
| | 0d6eaf74fc | |
| | 953ba50c07 | |
| | 9998069515 | |
| | fcda68b5b3 | |
| | f3d1b151b2 | |
| | e33d11ddc7 | |
| | 642ee2f975 | |
| | 8fcd27333f | |
| | f640ef9377 | |
| | de7f6e56a9 | |
| | 3b323198dc | |
| | c11e0283e8 | |
| | a5a7844646 | |
| | 7ff5b190ec | |
| | b297edd28d | |
| | 47c85d4983 | |
.github/workflows/common.yml (vendored) — 2 lines changed

@@ -29,7 +29,7 @@ jobs:
     steps:
     - uses: actions-rs/toolchain@v1
       with:
-        toolchain: nightly-2025-02-14
+        toolchain: nightly-2025-08-18
         override: true
         components: rustfmt, clippy
     - name: Install Go
.github/workflows/coordinator.yml (vendored) — 2 lines changed

@@ -33,7 +33,7 @@ jobs:
     steps:
     - uses: actions-rs/toolchain@v1
       with:
-        toolchain: nightly-2025-02-14
+        toolchain: nightly-2025-08-18
         override: true
         components: rustfmt, clippy
     - name: Install Go
.github/workflows/intermediate-docker.yml (vendored) — 6 lines changed

@@ -22,11 +22,9 @@ on:
         required: true
         type: choice
         options:
-          - nightly-2023-12-03
-          - nightly-2022-12-10
           - 1.86.0
-          - nightly-2025-02-14
-        default: "nightly-2023-12-03"
+          - nightly-2025-08-18
+        default: "nightly-2025-08-18"
       PYTHON_VERSION:
         description: "Python version"
         required: false
Cargo.lock (generated) — 2219 lines changed. File diff suppressed because it is too large.
Cargo.toml — 43 lines changed

@@ -14,15 +14,16 @@ edition = "2021"
 homepage = "https://scroll.io"
 readme = "README.md"
 repository = "https://github.com/scroll-tech/scroll"
-version = "4.5.8"
+version = "4.5.47"

 [workspace.dependencies]
-scroll-zkvm-prover-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", branch = "feat/0.5.1", package = "scroll-zkvm-prover" }
-scroll-zkvm-verifier-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", branch = "feat/0.5.1", package = "scroll-zkvm-verifier" }
-scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", branch = "feat/0.5.1" }
+scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "5c361ad" }
+scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "5c361ad" }
+scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "5c361ad" }

-sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "chore/openvm-1.3", features = ["scroll"] }
-sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "chore/openvm-1.3" }
+sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master", features = ["scroll", "rkyv"] }
+sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master" }
+sbv-core = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master", features = ["scroll"] }

 metrics = "0.23.0"
 metrics-util = "0.17"

@@ -30,7 +31,8 @@ metrics-tracing-context = "0.16.0"

 anyhow = "1.0"
 alloy = { version = "1", default-features = false }
-alloy-primitives = { version = "1.2", default-features = false, features = ["tiny-keccak"] }
+alloy-primitives = { version = "1.3", default-features = false, features = ["tiny-keccak"] }
+alloy-sol-types = { version = "1.3", default-features = false }
 # also use this to trigger "serde" feature for primitives
 alloy-serde = { version = "1", default-features = false }

@@ -46,21 +48,20 @@ once_cell = "1.20"
 base64 = "0.22"

 [patch.crates-io]
-revm = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-bytecode = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-context = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-context-interface = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-database = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-database-interface = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-handler = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-inspector = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-interpreter = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-precompile = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-primitives = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-state = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
+revm = { git = "https://github.com/scroll-tech/revm" }
+revm-bytecode = { git = "https://github.com/scroll-tech/revm" }
+revm-context = { git = "https://github.com/scroll-tech/revm" }
+revm-context-interface = { git = "https://github.com/scroll-tech/revm" }
+revm-database = { git = "https://github.com/scroll-tech/revm" }
+revm-database-interface = { git = "https://github.com/scroll-tech/revm" }
+revm-handler = { git = "https://github.com/scroll-tech/revm" }
+revm-inspector = { git = "https://github.com/scroll-tech/revm" }
+revm-interpreter = { git = "https://github.com/scroll-tech/revm" }
+revm-precompile = { git = "https://github.com/scroll-tech/revm" }
+revm-primitives = { git = "https://github.com/scroll-tech/revm" }
+revm-state = { git = "https://github.com/scroll-tech/revm" }

 ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.15.0" }
-alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "v1.2.0" }
+alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "feat/rkyv" }

 [profile.maxperf]
 inherits = "release"
@@ -10,7 +10,7 @@ require (
 	github.com/go-redis/redis/v8 v8.11.5
 	github.com/pressly/goose/v3 v3.16.0
 	github.com/prometheus/client_golang v1.19.0
-	github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6
+	github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178
 	github.com/scroll-tech/go-ethereum v1.10.14-0.20250729113104-bd8f141bb3e9
 	github.com/stretchr/testify v1.9.0
 	github.com/urfave/cli/v2 v2.25.7
@@ -309,8 +309,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
 github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
 github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6 h1:EiLZgoRfGgF0QzZ1jGSlo60A6W7g9L/8XLWMhwwtKJ0=
-github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
+github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 h1:4utngmJHXSOS5FoSdZhEV1xMRirpArbXvyoCZY9nYj0=
+github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250729113104-bd8f141bb3e9 h1:u371VK8eOU2Z/0SVf5KDI3eJc8msHSpJbav4do/8n38=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250729113104-bd8f141bb3e9/go.mod h1:pDCZ4iGvEGmdIe4aSAGBrb7XSrKEML6/L/wEMmNxOdk=
 github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
@@ -184,7 +184,7 @@ require (
 	github.com/rjeczalik/notify v0.9.1 // indirect
 	github.com/rs/cors v1.7.0 // indirect
 	github.com/russross/blackfriday/v2 v2.1.0 // indirect
-	github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6 // indirect
+	github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 // indirect
 	github.com/scroll-tech/zktrie v0.8.4 // indirect
 	github.com/secure-systems-lab/go-securesystemslib v0.4.0 // indirect
 	github.com/serialx/hashring v0.0.0-20190422032157-8b2912629002 // indirect
@@ -636,8 +636,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
 github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
 github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6 h1:EiLZgoRfGgF0QzZ1jGSlo60A6W7g9L/8XLWMhwwtKJ0=
-github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
+github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 h1:4utngmJHXSOS5FoSdZhEV1xMRirpArbXvyoCZY9nYj0=
+github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587 h1:wG1+gb+K4iLtxAHhiAreMdIjP5x9hB64duraN2+u1QU=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587/go.mod h1:YyfB2AyAtphlbIuDQgaxc2b9mo0zE4EBA1+qtXvzlmg=
 github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
@@ -135,10 +135,18 @@ type BlockContextV2 struct {
 	NumL1Msgs uint16 `json:"num_l1_msgs"`
 }

+// Metric data carried with OpenVMProof
+type OpenVMProofStat struct {
+	TotalCycle         uint64 `json:"total_cycles"`
+	ExecutionTimeMills uint64 `json:"execution_time_mills"`
+	ProvingTimeMills   uint64 `json:"proving_time_mills"`
+}
+
 // Proof for flatten VM proof
 type OpenVMProof struct {
-	Proof        []byte `json:"proofs"`
-	PublicValues []byte `json:"public_values"`
+	Proof        []byte           `json:"proofs"`
+	PublicValues []byte           `json:"public_values"`
+	Stat         *OpenVMProofStat `json:"stat,omitempty"`
 }

 // Proof for flatten EVM proof

@@ -150,7 +158,8 @@ type OpenVMEvmProof struct {
 // OpenVMChunkProof includes the proof info that are required for chunk verification and rollup.
 type OpenVMChunkProof struct {
 	MetaData struct {
-		ChunkInfo *ChunkInfo `json:"chunk_info"`
+		ChunkInfo    *ChunkInfo `json:"chunk_info"`
+		TotalGasUsed uint64     `json:"chunk_total_gas"`
 	} `json:"metadata"`

 	VmProof *OpenVMProof `json:"proof"`
@@ -5,7 +5,7 @@ import (
 	"runtime/debug"
 )

-var tag = "v4.5.42"
+var tag = "v4.5.46"

 var commit = func() string {
 	if info, ok := debug.ReadBuildInfo(); ok {
@@ -34,6 +34,13 @@ coordinator_cron:
 coordinator_tool:
 	go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_tool ./cmd/tool

+localsetup: coordinator_api ## Local setup: build coordinator_api, copy config, and setup releases
+	@echo "Copying configuration files..."
+	cp -r $(PWD)/conf $(PWD)/build/bin/
+	@echo "Setting up releases..."
+	cd $(PWD)/build && bash setup_releases.sh
+
+
 #coordinator_api_skip_libzkp:
 #	go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_api ./cmd/api
coordinator/build/setup_releases.sh (new file) — 62 lines

@@ -0,0 +1,62 @@
#!/bin/bash

# release version
if [ -z "${SCROLL_ZKVM_VERSION}" ]; then
  echo "SCROLL_ZKVM_VERSION not set"
  exit 1
fi

# set ASSET_DIR by reading from config.json
CONFIG_FILE="bin/conf/config.json"
if [ ! -f "$CONFIG_FILE" ]; then
  echo "Config file $CONFIG_FILE not found"
  exit 1
fi

# get the number of verifiers in the array
VERIFIER_COUNT=$(jq -r '.prover_manager.verifier.verifiers | length' "$CONFIG_FILE")

if [ "$VERIFIER_COUNT" = "null" ] || [ "$VERIFIER_COUNT" -eq 0 ]; then
  echo "No verifiers found in config file"
  exit 1
fi

echo "Found $VERIFIER_COUNT verifier(s) in config"

# iterate through each verifier entry
for ((i=0; i<$VERIFIER_COUNT; i++)); do
  # extract assets_path for current verifier
  ASSETS_PATH=$(jq -r ".prover_manager.verifier.verifiers[$i].assets_path" "$CONFIG_FILE")
  FORK_NAME=$(jq -r ".prover_manager.verifier.verifiers[$i].fork_name" "$CONFIG_FILE")

  if [ "$ASSETS_PATH" = "null" ]; then
    echo "Warning: Could not find assets_path for verifier $i, skipping..."
    continue
  fi

  echo "Processing verifier $i ($FORK_NAME): assets_path=$ASSETS_PATH"

  # check if it's an absolute path (starts with /)
  if [[ "$ASSETS_PATH" = /* ]]; then
    # absolute path, use as is
    ASSET_DIR="$ASSETS_PATH"
  else
    # relative path, prefix with "bin/"
    ASSET_DIR="bin/$ASSETS_PATH"
  fi

  echo "Using ASSET_DIR: $ASSET_DIR"

  # create directory if it doesn't exist
  mkdir -p "$ASSET_DIR"

  # assets for verifier-only mode
  echo "Downloading assets for $FORK_NAME to $ASSET_DIR..."
  wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/verifier.bin -O ${ASSET_DIR}/verifier.bin
  wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/openVmVk.json -O ${ASSET_DIR}/openVmVk.json

  echo "Completed downloading assets for $FORK_NAME"
  echo "---"
done

echo "All verifier assets downloaded successfully"
@@ -12,7 +12,7 @@
     {
       "assets_path": "assets",
       "fork_name": "euclidV2"
-    },
+    },
     {
       "assets_path": "assets",
       "fork_name": "feynman"
@@ -9,7 +9,7 @@ require (
 	github.com/google/uuid v1.6.0
 	github.com/mitchellh/mapstructure v1.5.0
 	github.com/prometheus/client_golang v1.19.0
-	github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6
+	github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178
 	github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7
 	github.com/shopspring/decimal v1.3.1
 	github.com/stretchr/testify v1.10.0
@@ -253,8 +253,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
 github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
 github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6 h1:EiLZgoRfGgF0QzZ1jGSlo60A6W7g9L/8XLWMhwwtKJ0=
-github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
+github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 h1:4utngmJHXSOS5FoSdZhEV1xMRirpArbXvyoCZY9nYj0=
+github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7 h1:1rN1qocsQlOyk1VCpIEF1J5pfQbLAi1pnMZSLQS37jQ=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7/go.mod h1:pDCZ4iGvEGmdIe4aSAGBrb7XSrKEML6/L/wEMmNxOdk=
 github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
@@ -57,14 +57,16 @@ type Config struct {

 // AssetConfig contain assets configurated for each fork, the defaul vkfile name is "OpenVmVk.json".
 type AssetConfig struct {
-	AssetsPath string `json:"assets_path"`
-	ForkName   string `json:"fork_name"`
-	Vkfile     string `json:"vk_file,omitempty"`
+	AssetsPath       string `json:"assets_path"`
+	ForkName         string `json:"fork_name"`
+	Vkfile           string `json:"vk_file,omitempty"`
+	MinProverVersion string `json:"min_prover_version,omitempty"`
 }

 // VerifierConfig load zk verifier config.
 type VerifierConfig struct {
 	MinProverVersion string `json:"min_prover_version"`
+	Features         string `json:"features,omitempty"`
 	Verifiers []AssetConfig `json:"verifiers"`
 }
@@ -24,18 +24,16 @@ type LoginLogic struct {

 	openVmVks map[string]struct{}

-	proverVersionHardForkMap map[string][]string
+	proverVersionHardForkMap map[string]string
 }

 // NewLoginLogic new a LoginLogic
 func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *LoginLogic {
-	proverVersionHardForkMap := make(map[string][]string)
+	proverVersionHardForkMap := make(map[string]string)

-	var hardForks []string
 	for _, cfg := range cfg.ProverManager.Verifier.Verifiers {
-		hardForks = append(hardForks, cfg.ForkName)
+		proverVersionHardForkMap[cfg.ForkName] = cfg.MinProverVersion
 	}
-	proverVersionHardForkMap[cfg.ProverManager.Verifier.MinProverVersion] = hardForks

 	return &LoginLogic{
 		cfg: cfg,

@@ -101,9 +99,15 @@ func (l *LoginLogic) ProverHardForkName(login *types.LoginParameter) (string, er
 	}

 	proverVersion := proverVersionSplits[0]
-	if hardForkNames, ok := l.proverVersionHardForkMap[proverVersion]; ok {
-		return strings.Join(hardForkNames, ","), nil
+	var hardForkNames []string
+	for n, minVersion := range l.proverVersionHardForkMap {
+		if minVersion == "" || version.CheckScrollRepoVersion(proverVersion, minVersion) {
+			hardForkNames = append(hardForkNames, n)
+		}
+	}
+	if len(hardForkNames) == 0 {
+		return "", fmt.Errorf("invalid prover prover_version:%s", login.Message.ProverVersion)
 	}

-	return "", fmt.Errorf("invalid prover prover_version:%s", login.Message.ProverVersion)
+	return strings.Join(hardForkNames, ","), nil
 }
@@ -140,3 +140,10 @@ func DumpVk(forkName, filePath string) error {

 	return nil
 }
+
+// Set dynamic feature flags that control libzkp runtime behavior
+func SetDynamicFeature(feats string) {
+	cFeats := goToCString(feats)
+	defer freeCString(cFeats)
+	C.set_dynamic_feature(cFeats)
+}
@@ -54,4 +54,7 @@ char* gen_wrapped_proof(char* proof_json, char* metadata, char* vk, size_t vk_le
 // Release memory allocated for a string returned by gen_wrapped_proof
 void release_string(char* string_ptr);

+void set_dynamic_feature(const char* feats);
+
+
 #endif /* LIBZKP_H */
@@ -71,6 +71,9 @@ type ProofReceiverLogic struct {
 	validateFailureProverTaskStatusNotOk prometheus.Counter
 	validateFailureProverTaskTimeout prometheus.Counter
 	validateFailureProverTaskHaveVerifier prometheus.Counter
+	proverSpeed *prometheus.GaugeVec
+	provingTime prometheus.Gauge
+	evmCyclePerGas prometheus.Gauge

 	ChunkTask provertask.ProverTask
 	BundleTask provertask.ProverTask

@@ -79,6 +82,7 @@ type ProofReceiverLogic struct {

 // NewSubmitProofReceiverLogic create a proof receiver logic
 func NewSubmitProofReceiverLogic(cfg *config.ProverManager, chainCfg *params.ChainConfig, db *gorm.DB, vf *verifier.Verifier, reg prometheus.Registerer) *ProofReceiverLogic {
+
 	return &ProofReceiverLogic{
 		chunkOrm: orm.NewChunk(db),
 		batchOrm: orm.NewBatch(db),

@@ -133,6 +137,18 @@ func NewSubmitProofReceiverLogic(cfg *config.ProverManager, chainCfg *params.Cha
 			Name: "coordinator_validate_failure_submit_have_been_verifier",
 			Help: "Total number of submit proof validate failure proof have been verifier.",
 		}),
+		evmCyclePerGas: promauto.With(reg).NewGauge(prometheus.GaugeOpts{
+			Name: "evm_circuit_cycle_per_gas",
+			Help: "VM cycles cost for a gas unit cost in evm execution",
+		}),
+		provingTime: promauto.With(reg).NewGauge(prometheus.GaugeOpts{
+			Name: "chunk_proving_time",
+			Help: "Wall clock time for chunk proving in second",
+		}),
+		proverSpeed: promauto.With(reg).NewGaugeVec(prometheus.GaugeOpts{
+			Name: "prover_speed",
+			Help: "Cycle against running time of prover (in mhz)",
+		}, []string{"type", "phase"}),
 	}
 }

@@ -204,12 +220,34 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
 			return unmarshalErr
 		}
 		success, verifyErr = m.verifier.VerifyChunkProof(chunkProof, hardForkName)
+		if stat := chunkProof.VmProof.Stat; stat != nil {
+			if g, _ := m.proverSpeed.GetMetricWithLabelValues("chunk", "exec"); g != nil && stat.ExecutionTimeMills > 0 {
+				g.Set(float64(stat.TotalCycle) / float64(stat.ExecutionTimeMills*1000))
+			}
+			if g, _ := m.proverSpeed.GetMetricWithLabelValues("chunk", "proving"); g != nil && stat.ProvingTimeMills > 0 {
+				g.Set(float64(stat.TotalCycle) / float64(stat.ProvingTimeMills*1000))
+			}
+			if chunkProof.MetaData.TotalGasUsed > 0 {
+				cycle_per_gas := float64(stat.TotalCycle) / float64(chunkProof.MetaData.TotalGasUsed)
+				m.evmCyclePerGas.Set(cycle_per_gas)
+			}
+			m.provingTime.Set(float64(stat.ProvingTimeMills) / 1000)
+		}
+
 	case message.ProofTypeBatch:
 		batchProof := &message.OpenVMBatchProof{}
 		if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &batchProof); unmarshalErr != nil {
 			return unmarshalErr
 		}
 		success, verifyErr = m.verifier.VerifyBatchProof(batchProof, hardForkName)
+		if stat := batchProof.VmProof.Stat; stat != nil {
+			if g, _ := m.proverSpeed.GetMetricWithLabelValues("batch", "exec"); g != nil && stat.ExecutionTimeMills > 0 {
+				g.Set(float64(stat.TotalCycle) / float64(stat.ExecutionTimeMills*1000))
+			}
+			if g, _ := m.proverSpeed.GetMetricWithLabelValues("batch", "proving"); g != nil && stat.ProvingTimeMills > 0 {
+				g.Set(float64(stat.TotalCycle) / float64(stat.ProvingTimeMills*1000))
+			}
+		}
 	case message.ProofTypeBundle:
 		bundleProof := &message.OpenVMBundleProof{}
 		if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &bundleProof); unmarshalErr != nil {
@@ -4,6 +4,7 @@ package verifier

 import (
 	"encoding/base64"
+	"encoding/hex"
 	"encoding/json"
 	"fmt"
 	"io"

@@ -66,6 +67,9 @@ func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
 		return nil, err
 	}

+	if cfg.Features != "" {
+		libzkp.SetDynamicFeature(cfg.Features)
+	}
 	libzkp.InitVerifier(string(configBytes))

 	v := &Verifier{
@@ -129,6 +133,23 @@ const blocked_vks = `
 D6YFHwTLZF/U2zpYJPQ3LwJZRm85yA5Vq2iFBqd3Mk4iwOUpS8sbOp3vg2+NDxhhKphgYpuUlykpdsoRhEt+cw==,
 `

+// tries to decode s as hex, and if that fails, as base64.
+func decodeVkString(s string) ([]byte, error) {
+	// Try hex decoding first
+	if b, err := hex.DecodeString(s); err == nil {
+		return b, nil
+	}
+	// Fallback to base64 decoding
+	b, err := base64.StdEncoding.DecodeString(s)
+	if err != nil {
+		return nil, err
+	}
+	if len(b) == 0 {
+		return nil, fmt.Errorf("decode vk string %s fail (empty bytes)", s)
+	}
+	return b, nil
+}
+
 func (v *Verifier) loadOpenVMVks(cfg config.AssetConfig) error {

 	vkFileName := cfg.Vkfile

@@ -165,17 +186,17 @@ func (v *Verifier) loadOpenVMVks(cfg config.AssetConfig) error {
 	v.OpenVMVkMap[dump.Bundle] = struct{}{}
 	log.Info("Load vks", "from", cfg.AssetsPath, "chunk", dump.Chunk, "batch", dump.Batch, "bundle", dump.Bundle)

-	decodedBytes, err := base64.StdEncoding.DecodeString(dump.Chunk)
+	decodedBytes, err := decodeVkString(dump.Chunk)
 	if err != nil {
 		return err
 	}
 	v.ChunkVk[cfg.ForkName] = decodedBytes
-	decodedBytes, err = base64.StdEncoding.DecodeString(dump.Batch)
+	decodedBytes, err = decodeVkString(dump.Batch)
 	if err != nil {
 		return err
 	}
 	v.BatchVk[cfg.ForkName] = decodedBytes
-	decodedBytes, err = base64.StdEncoding.DecodeString(dump.Bundle)
+	decodedBytes, err = decodeVkString(dump.Bundle)
 	if err != nil {
 		return err
 	}
@@ -584,7 +584,8 @@ func testTimeoutProof(t *testing.T) {
 	err = chunkOrm.UpdateBatchHashInRange(context.Background(), 0, 100, batch.Hash)
 	assert.NoError(t, err)
 	encodeData, err := json.Marshal(message.OpenVMChunkProof{VmProof: &message.OpenVMProof{}, MetaData: struct {
-		ChunkInfo *message.ChunkInfo `json:"chunk_info"`
+		ChunkInfo    *message.ChunkInfo `json:"chunk_info"`
+		TotalGasUsed uint64             `json:"chunk_total_gas"`
 	}{ChunkInfo: &message.ChunkInfo{}}})
 	assert.NoError(t, err)
 	assert.NotEmpty(t, encodeData)
@@ -1,45 +0,0 @@ (file deleted)

[patch."https://github.com/openvm-org/openvm.git"]
openvm-build = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-circuit = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-continuations = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-instructions ={ git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-native-circuit = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-native-compiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-native-recursion = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-native-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-rv32im-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false, features = ["parallel", "bench-metrics", "evm-prove"] }
openvm-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }

[patch."https://github.com/openvm-org/stark-backend.git"]
openvm-stark-backend = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
openvm-stark-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }

[patch."https://github.com/Plonky3/Plonky3.git"]
p3-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-field = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-commit = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-matrix = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-baby-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", features = [
    "nightly-features",
], tag = "v0.2.1" }
p3-koala-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-util = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-challenger = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-dft = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-fri = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-goldilocks = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-keccak = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-keccak-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-blake3 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-mds = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-merkle-tree = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-monty-31 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-poseidon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-poseidon2 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-poseidon2-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-symmetric = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-uni-stark = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-maybe-rayon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" } # the "parallel" feature is NOT on by default to allow single-threaded benchmarking
p3-bn254-fr = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
crates/gpu_override/Cargo.lock (generated) — 2397 lines changed. File diff suppressed because it is too large.
@@ -1,21 +0,0 @@ (file deleted)
.PHONY: build update clean

ZKVM_COMMIT ?= freebuild
PLONKY3_GPU_VERSION=$(shell ./print_plonky3gpu_version.sh | sed -n '2p')
$(info PLONKY3_GPU_VERSION is ${PLONKY3_GPU_VERSION})

GIT_REV ?= $(shell git rev-parse --short HEAD)
GO_TAG ?= $(shell grep "var tag = " ../../common/version/version.go | cut -d "\"" -f2)
ZK_VERSION=${ZKVM_COMMIT}-${PLONKY3_GPU_VERSION}
$(info ZK_GPU_VERSION is ${ZK_VERSION})

clean:
	cargo clean -Z unstable-options --release -p prover --lockfile-path ./Cargo.lock

# build gpu prover, never touch lock file
build:
	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build -Z unstable-options --release -p prover --lockfile-path ./Cargo.lock

# update Cargo.lock while override config has been updated
#update:
#	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build -Z unstable-options --release -p prover --lockfile-path ./Cargo.lock
@@ -1,15 +0,0 @@ (file deleted)
#!/bin/bash

config_file=.cargo/config.toml
plonky3_gpu_path=$(grep 'path.*plonky3-gpu' "$config_file" | cut -d'"' -f2 | head -n 1)
plonky3_gpu_path=$(dirname "$plonky3_gpu_path")

if [ -z $plonky3_gpu_path ]; then
  exit 0
else
  pushd $plonky3_gpu_path
  commit_hash=$(git log --pretty=format:%h -n 1)
  echo "${commit_hash:0:7}"

  popd
fi
@@ -13,6 +13,7 @@ libzkp = { path = "../libzkp" }
 alloy = { workspace = true, features = ["provider-http", "transport-http", "reqwest", "reqwest-rustls-tls", "json-rpc"] }
 sbv-primitives = { workspace = true, features = ["scroll"] }
 sbv-utils = { workspace = true, features = ["scroll"] }
+sbv-core = { workspace = true, features = ["scroll"] }

 eyre.workspace = true
@@ -11,7 +11,7 @@ pub fn init(config: &str) -> eyre::Result<()> {
     Ok(())
 }

-pub fn get_client() -> rpc_client::RpcClient<'static> {
+pub fn get_client() -> impl libzkp::tasks::ChunkInterpreter {
     GLOBAL_L2GETH_CLI
         .get()
         .expect("must has been inited")
@@ -1,5 +1,5 @@
 use alloy::{
-    providers::{Provider, ProviderBuilder, RootProvider},
+    providers::{Provider, ProviderBuilder},
     rpc::client::ClientBuilder,
     transports::layers::RetryBackoffLayer,
 };

@@ -49,13 +49,13 @@ pub struct RpcConfig {
 /// so it can be run in block mode (i.e. inside dynamic library without a global entry)
 pub struct RpcClientCore {
     /// rpc prover
-    provider: RootProvider<Network>,
+    client: alloy::rpc::client::RpcClient,
     rt: tokio::runtime::Runtime,
 }

 #[derive(Clone, Copy)]
-pub struct RpcClient<'a> {
-    provider: &'a RootProvider<Network>,
+pub struct RpcClient<'a, T: Provider<Network>> {
+    provider: T,
     handle: &'a tokio::runtime::Handle,
 }

@@ -75,80 +75,78 @@ impl RpcClientCore {
         let retry_layer = RetryBackoffLayer::new(config.max_retry, config.backoff, config.cups);
         let client = ClientBuilder::default().layer(retry_layer).http(rpc);

-        Ok(Self {
-            provider: ProviderBuilder::<_, _, Network>::default().connect_client(client),
-            rt,
-        })
+        Ok(Self { client, rt })
     }

-    pub fn get_client(&self) -> RpcClient {
+    pub fn get_client(&self) -> RpcClient<'_, impl Provider<Network>> {
         RpcClient {
-            provider: &self.provider,
+            provider: ProviderBuilder::<_, _, Network>::default()
+                .connect_client(self.client.clone()),
             handle: self.rt.handle(),
         }
     }
 }

-impl ChunkInterpreter for RpcClient<'_> {
+impl<T: Provider<Network>> ChunkInterpreter for RpcClient<'_, T> {
     fn try_fetch_block_witness(
         &self,
         block_hash: sbv_primitives::B256,
-        prev_witness: Option<&sbv_primitives::types::BlockWitness>,
-    ) -> Result<sbv_primitives::types::BlockWitness> {
+        prev_witness: Option<&sbv_core::BlockWitness>,
+    ) -> Result<sbv_core::BlockWitness> {
         async fn fetch_witness_async(
-            provider: &RootProvider<Network>,
+            provider: impl Provider<Network>,
             block_hash: sbv_primitives::B256,
-            prev_witness: Option<&sbv_primitives::types::BlockWitness>,
-        ) -> Result<sbv_primitives::types::BlockWitness> {
-            use sbv_utils::{rpc::ProviderExt, witness::WitnessBuilder};
+            prev_witness: Option<&sbv_core::BlockWitness>,
+        ) -> Result<sbv_core::BlockWitness> {
+            use sbv_utils::rpc::ProviderExt;

-            let chain_id = provider.get_chain_id().await?;
+            let (chain_id, block_num, prev_state_root) = if let Some(w) = prev_witness {
+                (w.chain_id, w.header.number + 1, w.header.state_root)
+            } else {
+                let chain_id = provider.get_chain_id().await?;
+                let block = provider
+                    .get_block_by_hash(block_hash)
+                    .full()
+                    .await?
+                    .ok_or_else(|| eyre::eyre!("Block {block_hash} not found"))?;

-            let block = provider
-                .get_block_by_hash(block_hash)
-                .full()
-                .await?
-                .ok_or_else(|| eyre::eyre!("Block {block_hash} not found"))?;
+                let parent_block = provider
+                    .get_block_by_hash(block.header.parent_hash)
+                    .await?
+                    .ok_or_else(|| {
+                        eyre::eyre!(
+                            "parent block for block {} should exist",
+                            block.header.number
+                        )
+                    })?;

-            let number = block.header.number;
-            let parent_hash = block.header.parent_hash;
-            if number == 0 {
-                eyre::bail!("no number in header or use block 0");
-            }
-
-            let mut witness_builder = WitnessBuilder::new()
-                .block(block)
-                .chain_id(chain_id)
-                .execution_witness(provider.debug_execution_witness(number.into()).await?);
-
-            let prev_state_root = match prev_witness {
-                Some(witness) => {
-                    if witness.header.number != number - 1 {
-                        eyre::bail!(
-                            "the ref witness is not the previous block, expected {} get {}",
-                            number - 1,
-                            witness.header.number,
-                        );
-                    }
-                    witness.header.state_root
-                }
-                None => {
-                    let parent_block = provider
-                        .get_block_by_hash(parent_hash)
-                        .await?
-                        .expect("parent block should exist");
-
-                    parent_block.header.state_root
-                }
+                (
+                    chain_id,
+                    block.header.number,
+                    parent_block.header.state_root,
+                )
             };
-            witness_builder = witness_builder.prev_state_root(prev_state_root);

-            Ok(witness_builder.build()?)
+            let req = provider
+                .dump_block_witness(block_num)
+                .with_chain_id(chain_id)
+                .with_prev_state_root(prev_state_root);
+
+            let witness = req
+                .send()
+                .await
+                .transpose()
+                .ok_or_else(|| eyre::eyre!("Block witness {block_num} not available"))??;
+
+            Ok(witness)
         }

         tracing::debug!("fetch witness for {block_hash}");
-        self.handle
-            .block_on(fetch_witness_async(self.provider, block_hash, prev_witness))
+        self.handle.block_on(fetch_witness_async(
+            &self.provider,
+            block_hash,
+            prev_witness,
+        ))
     }

     fn try_fetch_storage_node(

@@ -156,7 +154,7 @@ impl ChunkInterpreter for RpcClient<'_> {
         node_hash: sbv_primitives::B256,
     ) -> Result<sbv_primitives::Bytes> {
         async fn fetch_storage_node_async(
-            provider: &RootProvider<Network>,
+            provider: impl Provider<Network>,
             node_hash: sbv_primitives::B256,
         ) -> Result<sbv_primitives::Bytes> {
             let ret = provider

@@ -168,7 +166,7 @@ impl ChunkInterpreter for RpcClient<'_> {

         tracing::debug!("fetch storage node for {node_hash}");
         self.handle
-            .block_on(fetch_storage_node_async(self.provider, node_hash))
+            .block_on(fetch_storage_node_async(&self.provider, node_hash))
     }
 }

@@ -194,10 +192,10 @@ mod tests {
         let client_core = RpcClientCore::create(&config).expect("Failed to create RPC client");
         let client = client_core.get_client();

-        // latest - 1 block in 2025.6.15
+        // latest - 1 block in 2025.9.11
         let block_hash = B256::from(
             hex::const_decode_to_array(
-                b"0x9535a6970bc4db9031749331a214e35ed8c8a3f585f6f456d590a0bc780a1368",
+                b"0x093fb6bf2e556a659b35428ac447cd9f0635382fc40ffad417b5910824f9e932",
             )
             .unwrap(),
         );

@@ -207,10 +205,10 @@ mod tests {
             .try_fetch_block_witness(block_hash, None)
             .expect("should success");

-        // latest block in 2025.6.15
+        // block selected in 2025.9.11
         let block_hash = B256::from(
             hex::const_decode_to_array(
-                b"0xd47088cdb6afc68aa082e633bb7da9340d29c73841668afacfb9c1e66e557af0",
+                b"0x77cc84dd7a4dedf6fe5fb9b443aeb5a4fb0623ad088a365d3232b7b23fc848e5",
             )
             .unwrap(),
         );

@@ -220,26 +218,4 @@ mod tests {

         println!("{}", serde_json::to_string_pretty(&wit2).unwrap());
     }
-
-    #[test]
-    #[ignore = "Requires L2GETH_ENDPOINT environment variable"]
-    fn test_try_fetch_storage_node() {
-        let config = create_config_from_env();
-        let client_core = RpcClientCore::create(&config).expect("Failed to create RPC client");
-        let client = client_core.get_client();
-
-        // the root node (state root) of the block in unittest above
-        let node_hash = B256::from(
-            hex::const_decode_to_array(
-                b"0xb9e67403a2eb35afbb0475fe942918cf9a330a1d7532704c24554506be62b27c",
-            )
-            .unwrap(),
-        );
-
-        // This is expected to fail since we're using a dummy hash, but it tests the code path
-        let node = client
-            .try_fetch_storage_node(node_hash)
-            .expect("should success");
-        println!("{}", serde_json::to_string_pretty(&node).unwrap());
-    }
 }
@@ -6,10 +6,11 @@ edition.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [dependencies]
 scroll-zkvm-types.workspace = true
-scroll-zkvm-verifier-euclid.workspace = true
+scroll-zkvm-verifier.workspace = true

 alloy-primitives.workspace = true #depress the effect of "native-keccak"
 sbv-primitives = {workspace = true, features = ["scroll-compress-ratio", "scroll"]}
+sbv-core = { workspace = true, features = ["scroll"] }
 base64.workspace = true
 serde.workspace = true
 serde_derive.workspace = true

@@ -18,6 +19,7 @@ tracing.workspace = true
 eyre.workspace = true

 git-version = "0.3.5"
+bincode = { version = "2", features = ["serde"] }
 serde_stacker = "0.1"
 regex = "1.11"
 c-kzg = { version = "2.0", features = ["serde"] }
@@ -5,12 +5,33 @@ pub use verifier::{TaskType, VerifierConfig};
 mod utils;

 use sbv_primitives::B256;
-use scroll_zkvm_types::util::vec_as_base64;
+use scroll_zkvm_types::utils::vec_as_base64;
 use serde::{Deserialize, Serialize};
 use serde_json::value::RawValue;
 use std::path::Path;
 use tasks::chunk_interpreter::{ChunkInterpreter, TryFromWithInterpreter};

+/// global features: use legacy encoding for witness
+static mut LEGACY_WITNESS_ENCODING: bool = false;
+pub(crate) fn witness_use_legacy_mode() -> bool {
+    unsafe { LEGACY_WITNESS_ENCODING }
+}
+
+pub fn set_dynamic_feature(feats: &str) {
+    for feat_s in feats.split(':') {
+        match feat_s.trim().to_lowercase().as_str() {
+            "legacy_witness" => {
+                tracing::info!("set witness encoding for legacy mode");
+                unsafe {
+                    // the function is only called while initialize step
+                    LEGACY_WITNESS_ENCODING = true;
+                }
+            }
+            s => tracing::warn!("unrecognized dynamic feature: {s}"),
+        }
+    }
+}
+
 /// Turn the coordinator's chunk task into a json string for formal chunk proving
 /// task (with full witnesses)
 pub fn checkout_chunk_task(

@@ -32,7 +53,6 @@ pub fn gen_universal_task(
     task_json: &str,
     fork_name_str: &str,
     expected_vk: &[u8],
-    interpreter: Option<impl ChunkInterpreter>,
 ) -> eyre::Result<(B256, String, String)> {
     use proofs::*;
     use tasks::*;

@@ -56,10 +76,9 @@ pub fn gen_universal_task(
             if fork_name_str != task.fork_name.as_str() {
                 eyre::bail!("fork name in chunk task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
             }
-            let (pi_hash, metadata, u_task) = utils::panic_catch(move || {
-                gen_universal_chunk_task(task, fork_name_str.into(), interpreter)
-            })
-            .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
+            let (pi_hash, metadata, u_task) =
+                utils::panic_catch(move || gen_universal_chunk_task(task, fork_name_str.into()))
+                    .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
             (pi_hash, AnyMetaData::Chunk(metadata), u_task)
         }
         x if x == TaskType::Batch as i32 => {
@@ -7,10 +7,10 @@ use scroll_zkvm_types::{
     batch::BatchInfo,
     bundle::BundleInfo,
     chunk::ChunkInfo,
-    proof::{EvmProof, OpenVmEvmProof, ProofEnum, RootProof},
+    proof::{EvmProof, OpenVmEvmProof, ProofEnum, StarkProof},
     public_inputs::{ForkName, MultiVersionPublicInputs},
-    types_agg::{AggregationInput, ProgramCommitment},
-    util::vec_as_base64,
+    types_agg::AggregationInput,
+    utils::{serialize_vk, vec_as_base64},
 };
 use serde::{de::DeserializeOwned, Deserialize, Serialize};

@@ -40,7 +40,7 @@ pub struct WrappedProof<Metadata> {
 }

 pub trait AsRootProof {
-    fn as_root_proof(&self) -> &RootProof;
+    fn as_root_proof(&self) -> &StarkProof;
 }

 pub trait AsEvmProof {

@@ -61,17 +61,17 @@ pub type BatchProof = WrappedProof<BatchProofMetadata>;
 pub type BundleProof = WrappedProof<BundleProofMetadata>;

 impl AsRootProof for ChunkProof {
-    fn as_root_proof(&self) -> &RootProof {
+    fn as_root_proof(&self) -> &StarkProof {
         self.proof
-            .as_root_proof()
+            .as_stark_proof()
             .expect("batch proof use root proof")
     }
 }

 impl AsRootProof for BatchProof {
-    fn as_root_proof(&self) -> &RootProof {
+    fn as_root_proof(&self) -> &StarkProof {
         self.proof
-            .as_root_proof()
+            .as_stark_proof()
             .expect("batch proof use root proof")
     }
 }

@@ -122,6 +122,8 @@ pub trait PersistableProof: Sized {
 pub struct ChunkProofMetadata {
     /// The chunk information describing the list of blocks contained within the chunk.
     pub chunk_info: ChunkInfo,
+    /// Additional data for stat
+    pub chunk_total_gas: u64,
 }

 impl ProofMetadata for ChunkProofMetadata {

@@ -170,7 +172,7 @@ impl<Metadata> From<&WrappedProof<Metadata>> for AggregationInput {
     fn from(value: &WrappedProof<Metadata>) -> Self {
         Self {
             public_values: value.proof.public_values(),
-            commitment: ProgramCommitment::deserialize(&value.vk),
+            commitment: serialize_vk::deserialize(&value.vk),
         }
     }
 }
@@ -250,6 +252,7 @@ mod tests {
             batch_hash: B256::repeat_byte(4),
             withdraw_root: B256::repeat_byte(5),
             msg_queue_hash: B256::repeat_byte(6),
+            encryption_key: None,
         };
         let bundle_pi_hash = bundle_info.pi_hash(ForkName::EuclidV1);
         BundleProofMetadata {
@@ -16,6 +16,11 @@ use crate::{
 use sbv_primitives::B256;
 use scroll_zkvm_types::public_inputs::{ForkName, MultiVersionPublicInputs};

+fn encode_task_to_witness<T: serde::Serialize>(task: &T) -> eyre::Result<Vec<u8>> {
+    let config = bincode::config::standard();
+    Ok(bincode::serde::encode_to_vec(task, config)?)
+}
+
 fn check_aggregation_proofs<Metadata>(
     proofs: &[proofs::WrappedProof<Metadata>],
     fork_name: ForkName,

@@ -25,9 +30,9 @@ where
 {
     panic_catch(|| {
         for w in proofs.windows(2) {
-            w[1].metadata
-                .pi_hash_info()
-                .validate(w[0].metadata.pi_hash_info(), fork_name);
+            // w[1].metadata
+            //     .pi_hash_info()
+            //     .validate(w[0].metadata.pi_hash_info(), fork_name);
         }
     })
     .map_err(|e| eyre::eyre!("Chunk data validation failed: {}", e))?;

@@ -37,19 +42,19 @@ where

 /// Generate required staff for chunk proving
 pub fn gen_universal_chunk_task(
-    mut task: ChunkProvingTask,
+    task: ChunkProvingTask,
     fork_name: ForkName,
-    interpreter: Option<impl ChunkInterpreter>,
 ) -> eyre::Result<(B256, ChunkProofMetadata, ProvingTask)> {
-    if let Some(interpreter) = interpreter {
-        task.prepare_task_via_interpret(interpreter)?;
-    }
+    let chunk_total_gas = task.stats().total_gas_used;
     let chunk_info = task.precheck_and_build_metadata()?;
     let proving_task = task.try_into()?;
     let expected_pi_hash = chunk_info.pi_hash_by_fork(fork_name);
     Ok((
         expected_pi_hash,
-        ChunkProofMetadata { chunk_info },
+        ChunkProofMetadata {
+            chunk_info,
+            chunk_total_gas,
+        },
         proving_task,
     ))
 }
@@ -4,9 +4,9 @@ use eyre::Result;
 use sbv_primitives::{B256, U256};
 use scroll_zkvm_types::{
     batch::{
-        BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderV8, BatchInfo, BatchWitness,
-        Envelope, EnvelopeV6, EnvelopeV7, EnvelopeV8, PointEvalWitness, ReferenceHeader,
-        ToArchievedWitness, N_BLOB_BYTES,
+        build_point_eval_witness, BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderV8,
+        BatchInfo, BatchWitness, Envelope, EnvelopeV6, EnvelopeV7, EnvelopeV8, LegacyBatchWitness,
+        ReferenceHeader, N_BLOB_BYTES,
     },
     public_inputs::ForkName,
     task::ProvingTask,

@@ -84,6 +84,12 @@ impl TryFrom<BatchProvingTask> for ProvingTask {

     fn try_from(value: BatchProvingTask) -> Result<Self> {
         let witness = value.build_guest_input();
+        let serialized_witness = if crate::witness_use_legacy_mode() {
+            let legacy_witness = LegacyBatchWitness::from(witness);
+            to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
+        } else {
+            super::encode_task_to_witness(&witness)?
+        };

         Ok(ProvingTask {
             identifier: value.batch_header.batch_hash().to_string(),

@@ -91,9 +97,9 @@ impl TryFrom<BatchProvingTask> for ProvingTask {
             aggregated_proofs: value
                 .chunk_proofs
                 .into_iter()
-                .map(|w_proof| w_proof.proof.into_root_proof().expect("expect root proof"))
+                .map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
                 .collect(),
-            serialized_witness: vec![to_rkyv_bytes::<RancorError>(&witness)?.into_vec()],
+            serialized_witness: vec![serialized_witness],
             vk: Vec::new(),
         })
     }

@@ -161,10 +167,10 @@ impl BatchProvingTask {
             assert_eq!(p, kzg_proof);
         }

-        let point_eval_witness = PointEvalWitness {
-            kzg_commitment: kzg_commitment.into_inner(),
-            kzg_proof: kzg_proof.into_inner(),
-        };
+        let point_eval_witness = Some(build_point_eval_witness(
+            kzg_commitment.into_inner(),
+            kzg_proof.into_inner(),
+        ));

         let reference_header = match fork_name {
             ForkName::EuclidV1 => ReferenceHeader::V6(*self.batch_header.must_v6_header()),

@@ -183,6 +189,7 @@ impl BatchProvingTask {
             blob_bytes: self.blob_bytes.clone(),
             reference_header,
             point_eval_witness,
+            version: 0,
         }
     }

@@ -192,12 +199,7 @@ impl BatchProvingTask {
         // 1. generate data for metadata from the witness
         // 2. validate every adjacent proof pair
         let witness = self.build_guest_input();
-        let archieved = ToArchievedWitness::create(&witness)
-            .map_err(|e| eyre::eyre!("archieve batch witness fail: {e}"))?;
-        let archieved_witness = archieved
-            .access()
-            .map_err(|e| eyre::eyre!("access archieved batch witness fail: {e}"))?;
-        let metadata: BatchInfo = archieved_witness.into();
+        let metadata = BatchInfo::from(&witness);

         super::check_aggregation_proofs(self.chunk_proofs.as_slice(), fork_name)?;
@@ -18,7 +18,7 @@ pub mod base64 {
 pub mod point_eval {
     use c_kzg;
     use sbv_primitives::{types::eips::eip4844::BLS_MODULUS, B256 as H256, U256};
-    use scroll_zkvm_types::util::sha256_rv32;
+    use scroll_zkvm_types::utils::sha256_rv32;

     /// Given the blob-envelope, translate it to a fixed size EIP-4844 blob.
     ///
@@ -1,9 +1,10 @@
 use crate::proofs::BatchProof;
 use eyre::Result;
 use scroll_zkvm_types::{
-    bundle::{BundleInfo, BundleWitness, ToArchievedWitness},
+    bundle::{BundleInfo, BundleWitness},
     public_inputs::ForkName,
     task::ProvingTask,
+    utils::{to_rkyv_bytes, RancorError},
 };

 /// Message indicating a sanity check failure.

@@ -40,6 +41,7 @@ impl BundleProvingTask {

     fn build_guest_input(&self) -> BundleWitness {
         BundleWitness {
+            version: 0,
             batch_proofs: self.batch_proofs.iter().map(|proof| proof.into()).collect(),
             batch_infos: self
                 .batch_proofs

@@ -56,12 +58,7 @@ impl BundleProvingTask {
         // 1. generate data for metadata from the witness
         // 2. validate every adjacent proof pair
         let witness = self.build_guest_input();
-        let archieved = ToArchievedWitness::create(&witness)
-            .map_err(|e| eyre::eyre!("archieve bundle witness fail: {e}"))?;
-        let archieved_witness = archieved
-            .access()
-            .map_err(|e| eyre::eyre!("access archieved bundle witness fail: {e}"))?;
-        let metadata: BundleInfo = archieved_witness.into();
+        let metadata = BundleInfo::from(&witness);

         super::check_aggregation_proofs(self.batch_proofs.as_slice(), fork_name)?;

@@ -74,6 +71,12 @@ impl TryFrom<BundleProvingTask> for ProvingTask {

     fn try_from(value: BundleProvingTask) -> Result<Self> {
         let witness = value.build_guest_input();
+        let serialized_witness = if crate::witness_use_legacy_mode() {
+            //to_rkyv_bytes::<RancorError>(&witness)?.into_vec()
+            unimplemented!();
+        } else {
+            super::encode_task_to_witness(&witness)?
+        };

         Ok(ProvingTask {
             identifier: value.identifier(),

@@ -81,9 +84,9 @@ impl TryFrom<BundleProvingTask> for ProvingTask {
             aggregated_proofs: value
                 .batch_proofs
                 .into_iter()
-                .map(|w_proof| w_proof.proof.into_root_proof().expect("expect root proof"))
+                .map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
                 .collect(),
-            serialized_witness: vec![witness.rkyv_serialize(None)?.to_vec()],
+            serialized_witness: vec![serialized_witness],
             vk: Vec::new(),
         })
     }
@@ -1,9 +1,11 @@
|
||||
use super::chunk_interpreter::*;
|
||||
use eyre::Result;
|
||||
use sbv_primitives::{types::BlockWitness, B256};
|
||||
use sbv_core::BlockWitness;
|
||||
use sbv_primitives::B256;
|
||||
use scroll_zkvm_types::{
|
||||
chunk::{execute, ChunkInfo, ChunkWitness, ToArchievedWitness},
|
||||
chunk::{execute, ChunkInfo, ChunkWitness, LegacyChunkWitness},
|
||||
task::ProvingTask,
|
||||
utils::{to_rkyv_bytes, RancorError},
|
||||
};
|
||||
|
||||
/// The type aligned with coordinator's defination
|
||||
@@ -66,12 +68,18 @@ impl TryFrom<ChunkProvingTask> for ProvingTask {
|
||||
|
||||
fn try_from(value: ChunkProvingTask) -> Result<Self> {
|
||||
let witness = value.build_guest_input();
|
||||
let serialized_witness = if crate::witness_use_legacy_mode() {
|
||||
let legacy_witness = LegacyChunkWitness::from(witness);
|
||||
to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
|
||||
} else {
|
||||
super::encode_task_to_witness(&witness)?
|
||||
};
|
||||
|
||||
Ok(ProvingTask {
|
||||
identifier: value.identifier(),
|
||||
fork_name: value.fork_name,
|
||||
aggregated_proofs: Vec::new(),
|
||||
serialized_witness: vec![witness.rkyv_serialize(None)?.to_vec()],
|
||||
serialized_witness: vec![serialized_witness],
|
||||
vk: Vec::new(),
|
||||
})
|
||||
}
|
||||
@@ -83,7 +91,7 @@ impl ChunkProvingTask {
|
||||
let num_txs = self
|
||||
.block_witnesses
|
||||
.iter()
|
||||
.map(|b| b.transaction.len())
|
||||
.map(|b| b.transactions.len())
|
||||
.sum::<usize>();
|
||||
let total_gas_used = self
|
||||
.block_witnesses
|
||||
@@ -119,9 +127,11 @@ impl ChunkProvingTask {
|
||||
|
||||
fn build_guest_input(&self) -> ChunkWitness {
|
||||
ChunkWitness::new(
|
||||
0,
|
||||
&self.block_witnesses,
|
||||
self.prev_msg_queue_hash,
|
||||
self.fork_name.to_lowercase().as_str().into(),
|
||||
None,
|
||||
)
|
||||
}
|
||||
|
||||
@@ -131,18 +141,14 @@ impl ChunkProvingTask {
|
||||
|
||||
pub fn precheck_and_build_metadata(&self) -> Result<ChunkInfo> {
|
||||
let witness = self.build_guest_input();
|
||||
let archieved = ToArchievedWitness::create(&witness)
|
||||
.map_err(|e| eyre::eyre!("archieve chunk witness fail: {e}"))?;
|
||||
let archieved_witness = archieved
|
||||
.access()
|
||||
.map_err(|e| eyre::eyre!("access archieved chunk witness fail: {e}"))?;
|
||||
|
||||
let ret = ChunkInfo::try_from(archieved_witness).map_err(|e| eyre::eyre!("{e}"))?;
|
||||
let ret = ChunkInfo::try_from(witness).map_err(|e| eyre::eyre!("{e}"))?;
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
/// this method checks the validity of the current task (there may be missing storage nodes)
/// and tries to fix it until everything is ok
|
||||
#[deprecated]
|
||||
pub fn prepare_task_via_interpret(
|
||||
&mut self,
|
||||
interpreter: impl ChunkInterpreter,
|
||||
@@ -166,13 +172,8 @@ impl ChunkProvingTask {
|
||||
let mut attempts = 0;
|
||||
loop {
|
||||
let witness = self.build_guest_input();
|
||||
let archieved = ToArchievedWitness::create(&witness)
|
||||
.map_err(|e| eyre::eyre!("archieve chunk witness fail: {e}"))?;
|
||||
let archieved_witness = archieved
|
||||
.access()
|
||||
.map_err(|e| eyre::eyre!("access archieved chunk witness fail: {e}"))?;
|
||||
|
||||
match execute(archieved_witness) {
|
||||
match execute(witness) {
|
||||
Ok(_) => return Ok(()),
|
||||
Err(e) => {
|
||||
if let Some(caps) = err_parse_re.captures(&e) {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use eyre::Result;
|
||||
use sbv_primitives::{types::BlockWitness, Bytes, B256};
|
||||
use sbv_core::BlockWitness;
|
||||
use sbv_primitives::{Bytes, B256};
|
||||
|
||||
/// An interpreter which is critical in translating chunk data,
/// since we need to fetch block witness and storage node data
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#![allow(static_mut_refs)]
|
||||
|
||||
mod euclidv2;
|
||||
use euclidv2::EuclidV2Verifier;
|
||||
mod universal;
|
||||
use eyre::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
@@ -9,6 +8,7 @@ use std::{
|
||||
path::Path,
|
||||
sync::{Arc, Mutex, OnceLock},
|
||||
};
|
||||
use universal::Verifier;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub enum TaskType {
|
||||
@@ -61,7 +61,7 @@ pub fn init(config: VerifierConfig) {
|
||||
for cfg in &config.circuits {
|
||||
let canonical_fork_name = cfg.fork_name.to_lowercase();
|
||||
|
||||
let verifier = EuclidV2Verifier::new(&cfg.assets_path, canonical_fork_name.as_str().into());
|
||||
let verifier = Verifier::new(&cfg.assets_path, canonical_fork_name.as_str().into());
|
||||
let ret = verifiers.insert(canonical_fork_name, Arc::new(Mutex::new(verifier)));
|
||||
assert!(
|
||||
ret.is_none(),
|
||||
|
||||
@@ -7,59 +7,51 @@ use crate::{
|
||||
utils::panic_catch,
|
||||
};
|
||||
use scroll_zkvm_types::public_inputs::ForkName;
|
||||
use scroll_zkvm_verifier_euclid::verifier::UniversalVerifier;
|
||||
use scroll_zkvm_verifier::verifier::UniversalVerifier;
|
||||
use std::path::Path;
|
||||
|
||||
pub struct EuclidV2Verifier {
|
||||
pub struct Verifier {
|
||||
verifier: UniversalVerifier,
|
||||
fork: ForkName,
|
||||
}
|
||||
|
||||
impl EuclidV2Verifier {
|
||||
impl Verifier {
|
||||
pub fn new(assets_dir: &str, fork: ForkName) -> Self {
|
||||
let verifier_bin = Path::new(assets_dir).join("verifier.bin");
|
||||
let config = Path::new(assets_dir).join("root-verifier-vm-config");
|
||||
let exe = Path::new(assets_dir).join("root-verifier-committed-exe");
|
||||
let verifier_bin = Path::new(assets_dir);
|
||||
|
||||
Self {
|
||||
verifier: UniversalVerifier::setup(&config, &exe, &verifier_bin)
|
||||
.expect("Setting up chunk verifier"),
|
||||
verifier: UniversalVerifier::setup(verifier_bin).expect("Setting up chunk verifier"),
|
||||
fork,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ProofVerifier for EuclidV2Verifier {
|
||||
impl ProofVerifier for Verifier {
|
||||
fn verify(&self, task_type: super::TaskType, proof: &[u8]) -> Result<bool> {
|
||||
panic_catch(|| match task_type {
|
||||
TaskType::Chunk => {
|
||||
let proof = serde_json::from_slice::<ChunkProof>(proof).unwrap();
|
||||
if !proof.pi_hash_check(self.fork) {
|
||||
return false;
|
||||
}
|
||||
assert!(proof.pi_hash_check(self.fork));
|
||||
self.verifier
|
||||
.verify_proof(proof.as_root_proof(), &proof.vk)
|
||||
.verify_stark_proof(proof.as_root_proof(), &proof.vk)
|
||||
.unwrap()
|
||||
}
|
||||
TaskType::Batch => {
|
||||
let proof = serde_json::from_slice::<BatchProof>(proof).unwrap();
|
||||
if !proof.pi_hash_check(self.fork) {
|
||||
return false;
|
||||
}
|
||||
assert!(proof.pi_hash_check(self.fork));
|
||||
self.verifier
|
||||
.verify_proof(proof.as_root_proof(), &proof.vk)
|
||||
.verify_stark_proof(proof.as_root_proof(), &proof.vk)
|
||||
.unwrap()
|
||||
}
|
||||
TaskType::Bundle => {
|
||||
let proof = serde_json::from_slice::<BundleProof>(proof).unwrap();
|
||||
if !proof.pi_hash_check(self.fork) {
|
||||
return false;
|
||||
}
|
||||
assert!(proof.pi_hash_check(self.fork));
|
||||
let vk = proof.vk.clone();
|
||||
let evm_proof = proof.into_evm_proof();
|
||||
self.verifier.verify_proof_evm(&evm_proof, &vk).unwrap()
|
||||
self.verifier.verify_evm_proof(&evm_proof, &vk).unwrap()
|
||||
}
|
||||
})
|
||||
.map(|_| true)
|
||||
.map_err(|err_str: String| eyre::eyre!("{err_str}"))
|
||||
}
|
||||
|
||||
@@ -153,17 +153,12 @@ pub unsafe extern "C" fn gen_universal_task(
|
||||
expected_vk: *const u8,
|
||||
expected_vk_len: usize,
|
||||
) -> HandlingResult {
|
||||
let mut interpreter = None;
|
||||
let task_json = if task_type == TaskType::Chunk as i32 {
|
||||
let pre_task_str = c_char_to_str(task);
|
||||
let cli = l2geth::get_client();
|
||||
match libzkp::checkout_chunk_task(pre_task_str, cli) {
|
||||
Ok(str) => {
|
||||
interpreter.replace(cli);
|
||||
str
|
||||
}
|
||||
Ok(str) => str,
|
||||
Err(e) => {
|
||||
println!("gen_universal_task failed at pre interpret step, error: {e}");
|
||||
tracing::error!("gen_universal_task failed at pre interpret step, error: {e}");
|
||||
return failed_handling_result();
|
||||
}
|
||||
@@ -178,13 +173,8 @@ pub unsafe extern "C" fn gen_universal_task(
|
||||
&[]
|
||||
};
|
||||
|
||||
let ret = libzkp::gen_universal_task(
|
||||
task_type,
|
||||
&task_json,
|
||||
c_char_to_str(fork_name),
|
||||
expected_vk,
|
||||
interpreter,
|
||||
);
|
||||
let ret =
|
||||
libzkp::gen_universal_task(task_type, &task_json, c_char_to_str(fork_name), expected_vk);
|
||||
|
||||
if let Ok((pi_hash, meta_json, task_json)) = ret {
|
||||
let expected_pi_hash = pi_hash.0.map(|byte| byte as c_char);
|
||||
@@ -255,3 +245,10 @@ pub unsafe extern "C" fn release_string(ptr: *mut c_char) {
|
||||
let _ = CString::from_raw(ptr);
|
||||
}
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn set_dynamic_feature(feats: *const c_char) {
|
||||
let feats_str = c_char_to_str(feats);
|
||||
libzkp::set_dynamic_feature(feats_str);
|
||||
}
|
||||
|
||||
@@ -7,8 +7,8 @@ edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
scroll-zkvm-types.workspace = true
|
||||
scroll-zkvm-prover-euclid.workspace = true
|
||||
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", branch = "refactor/scroll" }
|
||||
scroll-zkvm-prover.workspace = true
|
||||
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "4c36ab2" }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
once_cell.workspace =true
|
||||
@@ -17,8 +17,9 @@ tiny-keccak = { workspace = true, features = ["sha3", "keccak"] }
|
||||
eyre.workspace = true
|
||||
|
||||
futures = "0.3.30"
|
||||
futures-util = "0.3"
|
||||
|
||||
reqwest = { version = "0.12.4", features = ["gzip"] }
|
||||
reqwest = { version = "0.12.4", features = ["gzip", "stream"] }
|
||||
reqwest-middleware = "0.3"
|
||||
reqwest-retry = "0.5"
|
||||
hex = "0.4.3"
|
||||
@@ -30,5 +31,9 @@ sled = "0.34.7"
|
||||
http = "1.1.0"
|
||||
clap = { version = "4.5", features = ["derive"] }
|
||||
ctor = "0.2.8"
|
||||
url = "2.5.4"
|
||||
url = { version = "2.5.4", features = ["serde",] }
|
||||
serde_bytes = "0.11.15"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
cuda = ["scroll-zkvm-prover/cuda"]
|
||||
7
crates/prover-bin/assets_url_preset.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"feynman": {
|
||||
"b68fdc3f28a5ce006280980df70cd3447e56913e5bca6054603ba85f0794c23a6618ea25a7991845bbc5fd571670ee47379ba31ace92d345bca59702a0d4112d": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/0.5.2/chunk/",
|
||||
"9a3f66370f11e3303f1a1248921025104e83253efea43a70d221cf4e15fc145bf2be2f4468d1ac4a70e7682babb1c60417e21c7633d4b55b58f44703ec82b05a": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/0.5.2/batch/",
|
||||
"1f8627277e1c1f6e1cc70c03e6fde06929e5ea27ca5b1d56e23b235dfeda282e22c0e5294bcb1b3a9def836f8d0f18612a9860629b9497292976ca11844b7e73": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/0.5.2/bundle/"
|
||||
}
|
||||
}
|
||||
@@ -34,11 +34,6 @@ struct Args {
|
||||
|
||||
#[derive(Subcommand, Debug)]
|
||||
enum Commands {
|
||||
/// Dump vk of this prover
|
||||
Dump {
|
||||
/// path to save the verifier's asset
|
||||
asset_path: String,
|
||||
},
|
||||
Handle {
|
||||
/// path to save the verifier's asset
|
||||
task_path: String,
|
||||
@@ -64,16 +59,10 @@ async fn main() -> eyre::Result<()> {
|
||||
}
|
||||
|
||||
let cfg = LocalProverConfig::from_file(args.config_file)?;
|
||||
let default_fork_name = cfg.circuits.keys().next().unwrap().clone();
|
||||
let sdk_config = cfg.sdk_config.clone();
|
||||
let local_prover = LocalProver::new(cfg.clone());
|
||||
|
||||
match args.command {
|
||||
Some(Commands::Dump { asset_path }) => {
|
||||
let fork_name = args.fork_name.unwrap_or(default_fork_name);
|
||||
println!("dump assets for {fork_name} into {asset_path}");
|
||||
local_prover.dump_verifier_assets(&fork_name, asset_path.as_ref())?;
|
||||
}
|
||||
Some(Commands::Handle { task_path }) => {
|
||||
let file = File::open(Path::new(&task_path))?;
|
||||
let reader = BufReader::new(file);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::zk_circuits_handler::{euclidV2::EuclidV2Handler, CircuitsHandler};
|
||||
use crate::zk_circuits_handler::{universal::UniversalHandler, CircuitsHandler};
|
||||
use async_trait::async_trait;
|
||||
use eyre::Result;
|
||||
use scroll_proving_sdk::{
|
||||
@@ -16,12 +16,111 @@ use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
fs::File,
|
||||
path::Path,
|
||||
sync::{Arc, OnceLock},
|
||||
path::{Path, PathBuf},
|
||||
sync::{Arc, LazyLock},
|
||||
time::{SystemTime, UNIX_EPOCH},
|
||||
};
|
||||
use tokio::{runtime::Handle, sync::Mutex, task::JoinHandle};
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct AssetsLocationData {
|
||||
/// the base URL used to form a general download URL for an asset; it MUST HAVE A TRAILING SLASH
|
||||
pub base_url: url::Url,
|
||||
#[serde(default)]
|
||||
/// an alternative URL for a specified vk, overriding the one derived from `base_url`
|
||||
pub asset_detours: HashMap<String, url::Url>,
|
||||
}
|
||||
|
||||
impl AssetsLocationData {
|
||||
pub fn gen_asset_url(&self, vk_as_path: &str, proof_type: ProofType) -> Result<url::Url> {
|
||||
Ok(self.base_url.join(
|
||||
match proof_type {
|
||||
ProofType::Chunk => format!("chunk/{vk_as_path}/"),
|
||||
ProofType::Batch => format!("batch/{vk_as_path}/"),
|
||||
ProofType::Bundle => format!("bundle/{vk_as_path}/"),
|
||||
t => eyre::bail!("unrecognized proof type: {}", t as u8),
|
||||
}
|
||||
.as_str(),
|
||||
)?)
|
||||
}
|
||||
|
||||
pub fn validate(&self) -> Result<()> {
|
||||
if !self.base_url.path().ends_with('/') {
|
||||
eyre::bail!(
|
||||
"base_url must have a trailing slash, got: {}",
|
||||
self.base_url
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_asset(
|
||||
&self,
|
||||
vk: &str,
|
||||
url_base: &url::Url,
|
||||
base_path: impl AsRef<Path>,
|
||||
) -> Result<PathBuf> {
|
||||
let download_files = ["app.vmexe", "openvm.toml"];
|
||||
|
||||
// Step 1: Create a local path for storage
|
||||
let storage_path = base_path.as_ref().join(vk);
|
||||
std::fs::create_dir_all(&storage_path)?;
|
||||
|
||||
// Step 2 & 3: Download each file if needed
|
||||
let client = reqwest::Client::new();
|
||||
|
||||
for filename in download_files.iter() {
|
||||
let local_file_path = storage_path.join(filename);
|
||||
let download_url = url_base.join(filename)?;
|
||||
|
||||
// Check if file already exists
|
||||
if local_file_path.exists() {
|
||||
// Get file metadata to check size
|
||||
if let Ok(metadata) = std::fs::metadata(&local_file_path) {
|
||||
// Make a HEAD request to get remote file size
|
||||
|
||||
if let Ok(head_resp) = client.head(download_url.clone()).send().await {
|
||||
if let Some(content_length) = head_resp.headers().get("content-length") {
|
||||
if let Ok(remote_size) =
|
||||
content_length.to_str().unwrap_or("0").parse::<u64>()
|
||||
{
|
||||
// If sizes match, skip download
|
||||
if metadata.len() == remote_size {
|
||||
println!("File {} already exists with matching size, skipping download", filename);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!("Downloading {} from {}", filename, download_url);
|
||||
|
||||
let response = client.get(download_url).send().await?;
|
||||
if !response.status().is_success() {
|
||||
eyre::bail!(
|
||||
"Failed to download {}: HTTP status {}",
|
||||
filename,
|
||||
response.status()
|
||||
);
|
||||
}
|
||||
|
||||
// Stream the content directly to file instead of loading into memory
|
||||
let mut file = std::fs::File::create(&local_file_path)?;
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
use futures_util::StreamExt;
|
||||
while let Some(chunk) = stream.next().await {
|
||||
std::io::Write::write_all(&mut file, &chunk?)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Step 4: Return the storage path
|
||||
Ok(storage_path)
|
||||
}
|
||||
}
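// Illustrative sketch only (not part of this diff; URLs and vk values are hypothetical):
// why `validate` insists on the trailing slash and how `gen_asset_url` composes the per-vk
// directory. `url::Url::join` resolves relative references, so a base URL without a trailing
// slash silently loses its last path segment when joined.
#[cfg(test)]
mod assets_location_sketch {
    use super::AssetsLocationData;
    use scroll_proving_sdk::prover::ProofType;
    use std::collections::HashMap;

    #[test]
    fn base_url_needs_trailing_slash() {
        let no_slash = url::Url::parse("https://example.com/releases/0.5.2").unwrap();
        // the "0.5.2" segment is dropped, which is exactly what `validate` guards against
        assert_eq!(
            no_slash.join("chunk/").unwrap().as_str(),
            "https://example.com/releases/chunk/"
        );
        let with_slash = url::Url::parse("https://example.com/releases/0.5.2/").unwrap();
        assert_eq!(
            with_slash.join("chunk/").unwrap().as_str(),
            "https://example.com/releases/0.5.2/chunk/"
        );
    }

    #[test]
    fn gen_asset_url_appends_kind_and_vk() {
        let loc = AssetsLocationData {
            base_url: url::Url::parse("https://example.com/releases/0.5.2/").unwrap(),
            asset_detours: HashMap::new(),
        };
        assert_eq!(
            loc.gen_asset_url("cafebabe", ProofType::Chunk).unwrap().as_str(),
            "https://example.com/releases/0.5.2/chunk/cafebabe/"
        );
    }
}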
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct LocalProverConfig {
|
||||
pub sdk_config: SdkConfig,
|
||||
@@ -45,7 +144,11 @@ impl LocalProverConfig {
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct CircuitConfig {
|
||||
pub hard_fork_name: String,
|
||||
/// The path to save assets for a specified hard fork phase
|
||||
pub workspace_path: String,
|
||||
#[serde(flatten)]
|
||||
/// The location data for dynamic loading
|
||||
pub location_data: AssetsLocationData,
|
||||
/// cached vk value to save some initial cost, for debugging only
|
||||
#[serde(default)]
|
||||
pub vks: HashMap<ProofType, String>,
|
||||
@@ -56,7 +159,7 @@ pub struct LocalProver {
|
||||
next_task_id: u64,
|
||||
current_task: Option<JoinHandle<Result<String>>>,
|
||||
|
||||
handlers: HashMap<String, OnceLock<Arc<dyn CircuitsHandler>>>,
|
||||
handlers: HashMap<String, Arc<dyn CircuitsHandler>>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
@@ -64,24 +167,15 @@ impl ProvingService for LocalProver {
|
||||
fn is_local(&self) -> bool {
|
||||
true
|
||||
}
|
||||
async fn get_vks(&self, req: GetVkRequest) -> GetVkResponse {
|
||||
let mut vks = vec![];
|
||||
for (hard_fork_name, cfg) in self.config.circuits.iter() {
|
||||
for proof_type in &req.proof_types {
|
||||
if let Some(vk) = cfg.vks.get(proof_type) {
|
||||
vks.push(vk.clone())
|
||||
} else {
|
||||
let handler = self.get_or_init_handler(hard_fork_name);
|
||||
vks.push(handler.get_vk(*proof_type).await);
|
||||
}
|
||||
}
|
||||
async fn get_vks(&self, _: GetVkRequest) -> GetVkResponse {
|
||||
// get_vk has been deprecated in the new prover with the dynamic asset-loading scheme
|
||||
GetVkResponse {
|
||||
vks: vec![],
|
||||
error: None,
|
||||
}
|
||||
|
||||
GetVkResponse { vks, error: None }
|
||||
}
|
||||
async fn prove(&mut self, req: ProveRequest) -> ProveResponse {
|
||||
let handler = self.get_or_init_handler(&req.hard_fork_name);
|
||||
match self.do_prove(req, handler).await {
|
||||
match self.do_prove(req).await {
|
||||
Ok(resp) => resp,
|
||||
Err(e) => ProveResponse {
|
||||
status: TaskStatus::Failed,
|
||||
@@ -132,34 +226,91 @@ impl ProvingService for LocalProver {
|
||||
}
|
||||
}
|
||||
|
||||
static GLOBAL_ASSET_URLS: LazyLock<HashMap<String, HashMap<String, url::Url>>> =
|
||||
LazyLock::new(|| {
|
||||
const ASSETS_JSON: &str = include_str!("../assets_url_preset.json");
|
||||
serde_json::from_str(ASSETS_JSON).expect("Failed to parse assets_url_preset.json")
|
||||
});
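// Sketch only (hypothetical vk and URL): the embedded preset is keyed by lower-case fork
// name, then by hex-encoded vk, and each leaf is the base URL of that circuit's release
// directory, matching the `HashMap<String, HashMap<String, url::Url>>` type above.
#[cfg(test)]
mod asset_preset_sketch {
    use std::collections::HashMap;

    #[test]
    fn preset_layout_parses() {
        let raw = r#"{ "feynman": { "abcd1234": "https://example.com/releases/0.5.2/chunk/" } }"#;
        let parsed: HashMap<String, HashMap<String, url::Url>> =
            serde_json::from_str(raw).expect("preset JSON should parse");
        assert!(parsed["feynman"]["abcd1234"].path().ends_with('/'));
    }
}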
|
||||
impl LocalProver {
|
||||
pub fn new(config: LocalProverConfig) -> Self {
|
||||
let handlers = config
|
||||
.circuits
|
||||
.keys()
|
||||
.map(|k| (k.clone(), OnceLock::new()))
|
||||
.collect();
|
||||
pub fn new(mut config: LocalProverConfig) -> Self {
|
||||
for (fork_name, circuit_config) in config.circuits.iter_mut() {
|
||||
// validate each base url
|
||||
circuit_config.location_data.validate().unwrap();
|
||||
let mut template_url_mapping = GLOBAL_ASSET_URLS
|
||||
.get(&fork_name.to_lowercase())
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
|
||||
// overlay any user-specified detours on top of the preset defaults
|
||||
for (key, url) in circuit_config.location_data.asset_detours.drain() {
|
||||
template_url_mapping.insert(key, url);
|
||||
}
|
||||
|
||||
circuit_config.location_data.asset_detours = template_url_mapping;
|
||||
|
||||
// validate each detour url
|
||||
for url in circuit_config.location_data.asset_detours.values() {
|
||||
assert!(
|
||||
url.path().ends_with('/'),
|
||||
"url {} must be end with /",
|
||||
url.as_str()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Self {
|
||||
config,
|
||||
next_task_id: 0,
|
||||
current_task: None,
|
||||
handlers,
|
||||
handlers: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn do_prove(
|
||||
&mut self,
|
||||
req: ProveRequest,
|
||||
handler: Arc<dyn CircuitsHandler>,
|
||||
) -> Result<ProveResponse> {
|
||||
async fn do_prove(&mut self, req: ProveRequest) -> Result<ProveResponse> {
|
||||
self.next_task_id += 1;
|
||||
let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
|
||||
let created_at = duration.as_secs() as f64 + duration.subsec_nanos() as f64 * 1e-9;
|
||||
|
||||
let req_clone = req.clone();
|
||||
let prover_task = UniversalHandler::get_task_from_input(&req.input)?;
|
||||
let vk = hex::encode(&prover_task.vk);
|
||||
let handler = if let Some(handler) = self.handlers.get(&vk) {
|
||||
handler.clone()
|
||||
} else {
|
||||
let base_config = self
|
||||
.config
|
||||
.circuits
|
||||
.get(&req.hard_fork_name)
|
||||
.ok_or_else(|| {
|
||||
eyre::eyre!(
|
||||
"coordinator sent unexpected forkname {}",
|
||||
req.hard_fork_name
|
||||
)
|
||||
})?;
|
||||
let url_base = if let Some(url) = base_config.location_data.asset_detours.get(&vk) {
|
||||
url.clone()
|
||||
} else {
|
||||
base_config
|
||||
.location_data
|
||||
.gen_asset_url(&vk, req.proof_type)?
|
||||
};
|
||||
let asset_path = base_config
|
||||
.location_data
|
||||
.get_asset(&vk, &url_base, &base_config.workspace_path)
|
||||
.await?;
|
||||
let circuits_handler = Arc::new(Mutex::new(UniversalHandler::new(
|
||||
&asset_path,
|
||||
req.proof_type,
|
||||
)?));
|
||||
self.handlers.insert(vk, circuits_handler.clone());
|
||||
circuits_handler
|
||||
};
|
||||
|
||||
let handle = Handle::current();
|
||||
let task_handle =
|
||||
tokio::task::spawn_blocking(move || handle.block_on(handler.get_proof_data(req_clone)));
|
||||
let is_evm = req.proof_type == ProofType::Bundle;
|
||||
let task_handle = tokio::task::spawn_blocking(move || {
|
||||
handle.block_on(handler.get_proof_data(&prover_task, is_evm))
|
||||
});
|
||||
self.current_task = Some(task_handle);
|
||||
|
||||
Ok(ProveResponse {
|
||||
@@ -173,77 +324,4 @@ impl LocalProver {
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
fn get_or_init_handler(&self, hard_fork_name: &str) -> Arc<dyn CircuitsHandler> {
|
||||
let lk = self
|
||||
.handlers
|
||||
.get(hard_fork_name)
|
||||
.expect("coordinator should never sent unexpected forkname");
|
||||
lk.get_or_init(|| self.new_handler(hard_fork_name)).clone()
|
||||
}
|
||||
|
||||
pub fn new_handler(&self, hard_fork_name: &str) -> Arc<dyn CircuitsHandler> {
|
||||
// if we got assigned a task for an unknown hard fork, there is something wrong in the
|
||||
// coordinator
|
||||
let config = self.config.circuits.get(hard_fork_name).unwrap();
|
||||
|
||||
match hard_fork_name {
|
||||
// The new EuclidV2Handler is a universal handler
|
||||
// We can add other handler implements if needed
|
||||
"some future forkname" => unreachable!(),
|
||||
_ => Arc::new(Arc::new(Mutex::new(EuclidV2Handler::new(config))))
|
||||
as Arc<dyn CircuitsHandler>,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn dump_verifier_assets(&self, hard_fork_name: &str, out_path: &Path) -> Result<()> {
|
||||
let config = self
|
||||
.config
|
||||
.circuits
|
||||
.get(hard_fork_name)
|
||||
.ok_or_else(|| eyre::eyre!("no corresponding config for fork {hard_fork_name}"))?;
|
||||
|
||||
if !config.vks.is_empty() {
|
||||
eyre::bail!("clean vks cache first or we will have wrong dumped vk");
|
||||
}
|
||||
|
||||
let workspace_path = &config.workspace_path;
|
||||
let universal_prover = EuclidV2Handler::new(config);
|
||||
let _ = universal_prover
|
||||
.get_prover()
|
||||
.dump_universal_verifier(Some(out_path))?;
|
||||
|
||||
#[derive(Debug, serde::Serialize)]
|
||||
struct VKDump {
|
||||
pub chunk_vk: String,
|
||||
pub batch_vk: String,
|
||||
pub bundle_vk: String,
|
||||
}
|
||||
|
||||
let dump = VKDump {
|
||||
chunk_vk: universal_prover.get_vk_and_cache(ProofType::Chunk),
|
||||
batch_vk: universal_prover.get_vk_and_cache(ProofType::Batch),
|
||||
bundle_vk: universal_prover.get_vk_and_cache(ProofType::Bundle),
|
||||
};
|
||||
|
||||
let f = File::create(out_path.join("openVmVk.json"))?;
|
||||
serde_json::to_writer(f, &dump)?;
|
||||
|
||||
// Copy verifier.bin from workspace bundle directory to output path
|
||||
let bundle_verifier_path = Path::new(workspace_path)
|
||||
.join("bundle")
|
||||
.join("verifier.bin");
|
||||
if bundle_verifier_path.exists() {
|
||||
let dest_path = out_path.join("verifier.bin");
|
||||
std::fs::copy(&bundle_verifier_path, &dest_path)
|
||||
.map_err(|e| eyre::eyre!("Failed to copy verifier.bin: {}", e))?;
|
||||
} else {
|
||||
eprintln!(
|
||||
"Warning: verifier.bin not found at {:?}",
|
||||
bundle_verifier_path
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
|
||||
@@ -1,65 +1,13 @@
|
||||
//pub mod euclid;
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
pub mod euclidV2;
|
||||
pub mod universal;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use eyre::Result;
|
||||
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
|
||||
use scroll_zkvm_prover_euclid::ProverConfig;
|
||||
use std::path::Path;
|
||||
use scroll_zkvm_types::ProvingTask;
|
||||
|
||||
#[async_trait]
|
||||
pub trait CircuitsHandler: Sync + Send {
|
||||
async fn get_vk(&self, task_type: ProofType) -> String;
|
||||
|
||||
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String>;
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub(crate) enum Phase {
|
||||
EuclidV2,
|
||||
}
|
||||
|
||||
impl Phase {
|
||||
pub fn phase_spec_chunk(&self, workspace_path: &Path) -> ProverConfig {
|
||||
let dir_cache = Some(workspace_path.join("cache"));
|
||||
let path_app_exe = workspace_path.join("chunk/app.vmexe");
|
||||
let path_app_config = workspace_path.join("chunk/openvm.toml");
|
||||
let segment_len = Some((1 << 22) - 100);
|
||||
ProverConfig {
|
||||
dir_cache,
|
||||
path_app_config,
|
||||
path_app_exe,
|
||||
segment_len,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn phase_spec_batch(&self, workspace_path: &Path) -> ProverConfig {
|
||||
let dir_cache = Some(workspace_path.join("cache"));
|
||||
let path_app_exe = workspace_path.join("batch/app.vmexe");
|
||||
let path_app_config = workspace_path.join("batch/openvm.toml");
|
||||
let segment_len = Some((1 << 22) - 100);
|
||||
ProverConfig {
|
||||
dir_cache,
|
||||
path_app_config,
|
||||
path_app_exe,
|
||||
segment_len,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn phase_spec_bundle(&self, workspace_path: &Path) -> ProverConfig {
|
||||
let dir_cache = Some(workspace_path.join("cache"));
|
||||
let path_app_config = workspace_path.join("bundle/openvm.toml");
|
||||
let segment_len = Some((1 << 22) - 100);
|
||||
ProverConfig {
|
||||
dir_cache,
|
||||
path_app_config,
|
||||
segment_len,
|
||||
path_app_exe: workspace_path.join("bundle/app.vmexe"),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
async fn get_proof_data(&self, u_task: &ProvingTask, need_snark: bool) -> Result<String>;
|
||||
}
|
||||
|
||||
@@ -1,144 +0,0 @@
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
use super::CircuitsHandler;
|
||||
use anyhow::{anyhow, Result};
|
||||
use async_trait::async_trait;
|
||||
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
|
||||
use scroll_zkvm_prover_euclid::{
|
||||
task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
|
||||
BatchProver, BundleProverEuclidV1, ChunkProver, ProverConfig,
|
||||
};
|
||||
use tokio::sync::Mutex;
|
||||
pub struct EuclidHandler {
|
||||
chunk_prover: ChunkProver,
|
||||
batch_prover: BatchProver,
|
||||
bundle_prover: BundleProverEuclidV1,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub(crate) enum Phase {
|
||||
EuclidV1,
|
||||
EuclidV2,
|
||||
}
|
||||
|
||||
impl Phase {
|
||||
pub fn as_str(&self) -> &str {
|
||||
match self {
|
||||
Phase::EuclidV1 => "euclidv1",
|
||||
Phase::EuclidV2 => "euclidv2",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn phase_spec_chunk(&self, workspace_path: &Path) -> ProverConfig {
|
||||
let dir_cache = Some(workspace_path.join("cache"));
|
||||
let path_app_exe = workspace_path.join("chunk/app.vmexe");
|
||||
let path_app_config = workspace_path.join("chunk/openvm.toml");
|
||||
let segment_len = Some((1 << 22) - 100);
|
||||
ProverConfig {
|
||||
dir_cache,
|
||||
path_app_config,
|
||||
path_app_exe,
|
||||
segment_len,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn phase_spec_batch(&self, workspace_path: &Path) -> ProverConfig {
|
||||
let dir_cache = Some(workspace_path.join("cache"));
|
||||
let path_app_exe = workspace_path.join("batch/app.vmexe");
|
||||
let path_app_config = workspace_path.join("batch/openvm.toml");
|
||||
let segment_len = Some((1 << 22) - 100);
|
||||
ProverConfig {
|
||||
dir_cache,
|
||||
path_app_config,
|
||||
path_app_exe,
|
||||
segment_len,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn phase_spec_bundle(&self, workspace_path: &Path) -> ProverConfig {
|
||||
let dir_cache = Some(workspace_path.join("cache"));
|
||||
let path_app_config = workspace_path.join("bundle/openvm.toml");
|
||||
let segment_len = Some((1 << 22) - 100);
|
||||
match self {
|
||||
Phase::EuclidV1 => ProverConfig {
|
||||
dir_cache,
|
||||
path_app_config,
|
||||
segment_len,
|
||||
path_app_exe: workspace_path.join("bundle/app_euclidv1.vmexe"),
|
||||
..Default::default()
|
||||
},
|
||||
Phase::EuclidV2 => ProverConfig {
|
||||
dir_cache,
|
||||
path_app_config,
|
||||
segment_len,
|
||||
path_app_exe: workspace_path.join("bundle/app.vmexe"),
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl Send for EuclidHandler {}
|
||||
|
||||
impl EuclidHandler {
|
||||
pub fn new(workspace_path: &str) -> Self {
|
||||
let p = Phase::EuclidV1;
|
||||
let workspace_path = Path::new(workspace_path);
|
||||
let chunk_prover = ChunkProver::setup(p.phase_spec_chunk(workspace_path))
|
||||
.expect("Failed to setup chunk prover");
|
||||
|
||||
let batch_prover = BatchProver::setup(p.phase_spec_batch(workspace_path))
|
||||
.expect("Failed to setup batch prover");
|
||||
|
||||
let bundle_prover = BundleProverEuclidV1::setup(p.phase_spec_bundle(workspace_path))
|
||||
.expect("Failed to setup bundle prover");
|
||||
|
||||
Self {
|
||||
chunk_prover,
|
||||
batch_prover,
|
||||
bundle_prover,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl CircuitsHandler for Arc<Mutex<EuclidHandler>> {
|
||||
async fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>> {
|
||||
Some(match task_type {
|
||||
ProofType::Chunk => self.try_lock().unwrap().chunk_prover.get_app_vk(),
|
||||
ProofType::Batch => self.try_lock().unwrap().batch_prover.get_app_vk(),
|
||||
ProofType::Bundle => self.try_lock().unwrap().bundle_prover.get_app_vk(),
|
||||
_ => unreachable!("Unsupported proof type"),
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
|
||||
match prove_request.proof_type {
|
||||
ProofType::Chunk => {
|
||||
let task: ChunkProvingTask = serde_json::from_str(&prove_request.input)?;
|
||||
let proof = self.try_lock().unwrap().chunk_prover.gen_proof(&task)?;
|
||||
|
||||
Ok(serde_json::to_string(&proof)?)
|
||||
}
|
||||
ProofType::Batch => {
|
||||
let task: BatchProvingTask = serde_json::from_str(&prove_request.input)?;
|
||||
let proof = self.try_lock().unwrap().batch_prover.gen_proof(&task)?;
|
||||
|
||||
Ok(serde_json::to_string(&proof)?)
|
||||
}
|
||||
ProofType::Bundle => {
|
||||
let batch_proofs: BundleProvingTask = serde_json::from_str(&prove_request.input)?;
|
||||
let proof = self
|
||||
.try_lock()
|
||||
.unwrap()
|
||||
.bundle_prover
|
||||
.gen_proof_evm(&batch_proofs)?;
|
||||
|
||||
Ok(serde_json::to_string(&proof)?)
|
||||
}
|
||||
_ => Err(anyhow!("Unsupported proof type")),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,119 +0,0 @@
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
path::Path,
|
||||
sync::{Arc, OnceLock},
|
||||
};
|
||||
|
||||
use super::{CircuitsHandler, Phase};
|
||||
use crate::prover::CircuitConfig;
|
||||
use async_trait::async_trait;
|
||||
use base64::{prelude::BASE64_STANDARD, Engine};
|
||||
use eyre::Result;
|
||||
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
|
||||
use scroll_zkvm_prover_euclid::{BatchProver, BundleProverEuclidV2, ChunkProver};
|
||||
use scroll_zkvm_types::ProvingTask;
|
||||
use tokio::sync::Mutex;
|
||||
pub struct EuclidV2Handler {
|
||||
chunk_prover: ChunkProver,
|
||||
batch_prover: BatchProver,
|
||||
bundle_prover: BundleProverEuclidV2,
|
||||
cached_vks: HashMap<ProofType, OnceLock<String>>,
|
||||
}
|
||||
|
||||
unsafe impl Send for EuclidV2Handler {}
|
||||
|
||||
impl EuclidV2Handler {
|
||||
pub fn new(cfg: &CircuitConfig) -> Self {
|
||||
let workspace_path = &cfg.workspace_path;
|
||||
let p = Phase::EuclidV2;
|
||||
let workspace_path = Path::new(workspace_path);
|
||||
let chunk_prover = ChunkProver::setup(p.phase_spec_chunk(workspace_path))
|
||||
.expect("Failed to setup chunk prover");
|
||||
|
||||
let batch_prover = BatchProver::setup(p.phase_spec_batch(workspace_path))
|
||||
.expect("Failed to setup batch prover");
|
||||
|
||||
let bundle_prover = BundleProverEuclidV2::setup(p.phase_spec_bundle(workspace_path))
|
||||
.expect("Failed to setup bundle prover");
|
||||
|
||||
let build_vk_cache = |proof_type: ProofType| {
|
||||
let vk = if let Some(vk) = cfg.vks.get(&proof_type) {
|
||||
OnceLock::from(vk.clone())
|
||||
} else {
|
||||
OnceLock::new()
|
||||
};
|
||||
(proof_type, vk)
|
||||
};
|
||||
|
||||
Self {
|
||||
chunk_prover,
|
||||
batch_prover,
|
||||
bundle_prover,
|
||||
cached_vks: HashMap::from([
|
||||
build_vk_cache(ProofType::Chunk),
|
||||
build_vk_cache(ProofType::Batch),
|
||||
build_vk_cache(ProofType::Bundle),
|
||||
]),
|
||||
}
|
||||
}
|
||||
|
||||
/// get_prover get the inner prover, later we would replace chunk/batch/bundle_prover with
|
||||
/// universal prover, before that, use bundle_prover as the represent one
|
||||
pub fn get_prover(&self) -> &BundleProverEuclidV2 {
|
||||
&self.bundle_prover
|
||||
}
|
||||
|
||||
pub fn get_vk_and_cache(&self, task_type: ProofType) -> String {
|
||||
match task_type {
|
||||
ProofType::Chunk => self.cached_vks[&ProofType::Chunk]
|
||||
.get_or_init(|| BASE64_STANDARD.encode(self.chunk_prover.get_app_vk())),
|
||||
ProofType::Batch => self.cached_vks[&ProofType::Batch]
|
||||
.get_or_init(|| BASE64_STANDARD.encode(self.batch_prover.get_app_vk())),
|
||||
ProofType::Bundle => self.cached_vks[&ProofType::Bundle]
|
||||
.get_or_init(|| BASE64_STANDARD.encode(self.bundle_prover.get_app_vk())),
|
||||
_ => unreachable!("Unsupported proof type {:?}", task_type),
|
||||
}
|
||||
.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl CircuitsHandler for Arc<Mutex<EuclidV2Handler>> {
|
||||
async fn get_vk(&self, task_type: ProofType) -> String {
|
||||
self.lock().await.get_vk_and_cache(task_type)
|
||||
}
|
||||
|
||||
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
|
||||
let handler_self = self.lock().await;
|
||||
let u_task: ProvingTask = serde_json::from_str(&prove_request.input)?;
|
||||
let expected_vk = handler_self.get_vk_and_cache(prove_request.proof_type);
|
||||
if BASE64_STANDARD.encode(&u_task.vk) != expected_vk {
|
||||
eyre::bail!(
|
||||
"vk is not match!, prove type {:?}, expected {}, get {}",
|
||||
prove_request.proof_type,
|
||||
expected_vk,
|
||||
BASE64_STANDARD.encode(&u_task.vk),
|
||||
);
|
||||
}
|
||||
|
||||
let proof = match prove_request.proof_type {
|
||||
ProofType::Chunk => handler_self
|
||||
.chunk_prover
|
||||
.gen_proof_universal(&u_task, false)?,
|
||||
ProofType::Batch => handler_self
|
||||
.batch_prover
|
||||
.gen_proof_universal(&u_task, false)?,
|
||||
ProofType::Bundle => handler_self
|
||||
.bundle_prover
|
||||
.gen_proof_universal(&u_task, true)?,
|
||||
_ => {
|
||||
return Err(eyre::eyre!(
|
||||
"Unsupported proof type {:?}",
|
||||
prove_request.proof_type
|
||||
))
|
||||
}
|
||||
};
|
||||
//TODO: check expected PI
|
||||
Ok(serde_json::to_string(&proof)?)
|
||||
}
|
||||
}
|
||||
55
crates/prover-bin/src/zk_circuits_handler/universal.rs
Normal file
@@ -0,0 +1,55 @@
|
||||
use std::path::Path;
|
||||
|
||||
use super::CircuitsHandler;
|
||||
use async_trait::async_trait;
|
||||
use eyre::Result;
|
||||
use scroll_proving_sdk::prover::ProofType;
|
||||
use scroll_zkvm_prover::{Prover, ProverConfig};
|
||||
use scroll_zkvm_types::ProvingTask;
|
||||
use tokio::sync::Mutex;
|
||||
pub struct UniversalHandler {
|
||||
prover: Prover,
|
||||
}
|
||||
|
||||
/// Safe for the current usage as a `CircuitsHandler`: the instance is protected inside a
/// Mutex and is NEVER extracted via `into_inner`.
|
||||
unsafe impl Send for UniversalHandler {}
|
||||
|
||||
impl UniversalHandler {
|
||||
pub fn new(workspace_path: impl AsRef<Path>, _proof_type: ProofType) -> Result<Self> {
|
||||
let path_app_exe = workspace_path.as_ref().join("app.vmexe");
|
||||
let path_app_config = workspace_path.as_ref().join("openvm.toml");
|
||||
let segment_len = Some((1 << 22) - 100);
|
||||
let config = ProverConfig {
|
||||
path_app_config,
|
||||
path_app_exe,
|
||||
segment_len,
|
||||
};
|
||||
|
||||
let prover = Prover::setup(config, None)?;
|
||||
Ok(Self { prover })
|
||||
}
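    // Note (sketch of the expected layout; the path below is hypothetical): the directory
    // handed to `new` is the per-vk storage path that `AssetsLocationData::get_asset`
    // populates, so it is expected to already contain the two downloaded artifacts:
    //   /var/scroll/workspace/<vk-hex>/app.vmexe
    //   /var/scroll/workspace/<vk-hex>/openvm.toml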
|
||||
/// get_prover returns the inner prover; later we will replace chunk/batch/bundle_prover with
/// the universal prover, and until then bundle_prover serves as the representative one
|
||||
pub fn get_prover(&mut self) -> &mut Prover {
|
||||
&mut self.prover
|
||||
}
|
||||
|
||||
pub fn get_task_from_input(input: &str) -> Result<ProvingTask> {
|
||||
Ok(serde_json::from_str(input)?)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl CircuitsHandler for Mutex<UniversalHandler> {
|
||||
async fn get_proof_data(&self, u_task: &ProvingTask, need_snark: bool) -> Result<String> {
|
||||
let mut handler_self = self.lock().await;
|
||||
|
||||
let proof = handler_self
|
||||
.get_prover()
|
||||
.gen_proof_universal(u_task, need_snark)?;
|
||||
|
||||
Ok(serde_json::to_string(&proof)?)
|
||||
}
|
||||
}
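// Usage sketch only (the asset path is hypothetical; this mirrors what `LocalProver::do_prove`
// does after downloading the assets): parse the coordinator task, then request a STARK proof,
// or an EVM (snark) proof for bundle tasks.
#[allow(dead_code)]
async fn universal_handler_usage_sketch(input: &str, is_bundle: bool) -> Result<String> {
    let task = UniversalHandler::get_task_from_input(input)?;
    let handler = Mutex::new(UniversalHandler::new(
        "/var/scroll/workspace/some-vk-hex",
        ProofType::Chunk, // the proof type is currently unused by `new`
    )?);
    handler.get_proof_data(&task, is_bundle).await
}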
@@ -15,7 +15,6 @@
|
||||
},
|
||||
"chunk_proposer_config": {
|
||||
"propose_interval_milliseconds": 100,
|
||||
"max_block_num_per_chunk": 100,
|
||||
"max_l2_gas_per_chunk": 20000000,
|
||||
"chunk_timeout_sec": 300,
|
||||
"max_uncompressed_batch_bytes_size": 4194304
|
||||
|
||||
@@ -92,7 +92,6 @@
|
||||
},
|
||||
"chunk_proposer_config": {
|
||||
"propose_interval_milliseconds": 100,
|
||||
"max_block_num_per_chunk": 100,
|
||||
"max_l2_gas_per_chunk": 20000000,
|
||||
"chunk_timeout_sec": 300,
|
||||
"max_uncompressed_batch_bytes_size": 4194304
|
||||
|
||||
@@ -15,7 +15,7 @@ require (
|
||||
github.com/holiman/uint256 v1.3.2
|
||||
github.com/mitchellh/mapstructure v1.5.0
|
||||
github.com/prometheus/client_golang v1.16.0
|
||||
github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6
|
||||
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7
|
||||
github.com/smartystreets/goconvey v1.8.0
|
||||
github.com/spf13/viper v1.19.0
|
||||
|
||||
@@ -285,8 +285,8 @@ github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6ke
|
||||
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
|
||||
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
|
||||
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
|
||||
github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6 h1:EiLZgoRfGgF0QzZ1jGSlo60A6W7g9L/8XLWMhwwtKJ0=
|
||||
github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
|
||||
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 h1:4utngmJHXSOS5FoSdZhEV1xMRirpArbXvyoCZY9nYj0=
|
||||
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7 h1:1rN1qocsQlOyk1VCpIEF1J5pfQbLAi1pnMZSLQS37jQ=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7/go.mod h1:pDCZ4iGvEGmdIe4aSAGBrb7XSrKEML6/L/wEMmNxOdk=
|
||||
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
|
||||
|
||||
@@ -31,7 +31,6 @@ type L2Config struct {
|
||||
// ChunkProposerConfig loads chunk_proposer configuration items.
|
||||
type ChunkProposerConfig struct {
|
||||
ProposeIntervalMilliseconds uint64 `json:"propose_interval_milliseconds"`
|
||||
MaxBlockNumPerChunk uint64 `json:"max_block_num_per_chunk"`
|
||||
MaxL2GasPerChunk uint64 `json:"max_l2_gas_per_chunk"`
|
||||
ChunkTimeoutSec uint64 `json:"chunk_timeout_sec"`
|
||||
MaxUncompressedBatchBytesSize uint64 `json:"max_uncompressed_batch_bytes_size"`
|
||||
|
||||
@@ -36,7 +36,7 @@ func testBatchProposerLimitsCodecV7(t *testing.T) {
|
||||
name: "Timeout",
|
||||
batchTimeoutSec: 0,
|
||||
expectedBatchesLen: 1,
|
||||
expectedChunksInFirstBatch: 2,
|
||||
expectedChunksInFirstBatch: 1,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -72,8 +72,7 @@ func testBatchProposerLimitsCodecV7(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: 1,
|
||||
MaxL2GasPerChunk: 20000000,
|
||||
MaxL2GasPerChunk: math.MaxUint64,
|
||||
ChunkTimeoutSec: 300,
|
||||
MaxUncompressedBatchBytesSize: math.MaxUint64,
|
||||
}, encoding.CodecV7, ¶ms.ChainConfig{
|
||||
@@ -154,7 +153,6 @@ func testBatchProposerBlobSizeLimitCodecV7(t *testing.T) {
|
||||
chainConfig := ¶ms.ChainConfig{LondonBlock: big.NewInt(0), BernoulliBlock: big.NewInt(0), CurieBlock: big.NewInt(0), DarwinTime: new(uint64), DarwinV2Time: new(uint64), EuclidTime: new(uint64), EuclidV2Time: new(uint64)}
|
||||
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: math.MaxUint64,
|
||||
MaxL2GasPerChunk: math.MaxUint64,
|
||||
ChunkTimeoutSec: 0,
|
||||
MaxUncompressedBatchBytesSize: math.MaxUint64,
|
||||
@@ -227,7 +225,6 @@ func testBatchProposerMaxChunkNumPerBatchLimitCodecV7(t *testing.T) {
|
||||
chainConfig := ¶ms.ChainConfig{LondonBlock: big.NewInt(0), BernoulliBlock: big.NewInt(0), CurieBlock: big.NewInt(0), DarwinTime: new(uint64), DarwinV2Time: new(uint64), EuclidTime: new(uint64), EuclidV2Time: new(uint64)}
|
||||
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: math.MaxUint64,
|
||||
MaxL2GasPerChunk: math.MaxUint64,
|
||||
ChunkTimeoutSec: 0,
|
||||
MaxUncompressedBatchBytesSize: math.MaxUint64,
|
||||
@@ -309,15 +306,14 @@ func testBatchProposerUncompressedBatchBytesLimitCodecV8(t *testing.T) {
|
||||
|
||||
// Create chunk proposer with no uncompressed batch bytes limit for chunks
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: 1, // One block per chunk
|
||||
MaxL2GasPerChunk: math.MaxUint64,
|
||||
MaxL2GasPerChunk: 1200000, // One block per chunk via gas limit
|
||||
ChunkTimeoutSec: math.MaxUint32,
|
||||
MaxUncompressedBatchBytesSize: math.MaxUint64,
|
||||
}, encoding.CodecV8, chainConfig, db, nil)
|
||||
|
||||
// Insert 2 blocks with large calldata and create 2 chunks
|
||||
l2BlockOrm := orm.NewL2Block(db)
|
||||
for i := uint64(1); i <= 2; i++ {
|
||||
for i := uint64(1); i <= 3; i++ {
|
||||
blockCopy := *block
|
||||
blockCopy.Header = &gethTypes.Header{}
|
||||
*blockCopy.Header = *block.Header
|
||||
@@ -326,7 +322,9 @@ func testBatchProposerUncompressedBatchBytesLimitCodecV8(t *testing.T) {
|
||||
err := l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{&blockCopy})
|
||||
assert.NoError(t, err)
|
||||
|
||||
cp.TryProposeChunk() // Each call creates one chunk with one block
|
||||
cp.TryProposeChunk() // Each chunk will contain 1 block (~3KiB)
|
||||
// We create 2 chunks here, as we have 3 blocks and reach the gas limit for the 1st chunk with the 2nd block
|
||||
// and the 2nd chunk with the 3rd block.
|
||||
}
|
||||
|
||||
// Create batch proposer with 4KiB uncompressed batch bytes limit
|
||||
|
||||
@@ -86,15 +86,19 @@ func testBundleProposerLimitsCodecV7(t *testing.T) {
|
||||
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
block3 := *block1
|
||||
block3.Header = &gethTypes.Header{}
|
||||
*block3.Header = *block1.Header
|
||||
block3.Header.Number = new(big.Int).SetUint64(block2.Header.Number.Uint64() + 1)
|
||||
|
||||
l2BlockOrm := orm.NewL2Block(db)
|
||||
err = l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block1, block2})
|
||||
err = l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block1, block2, &block3})
|
||||
assert.NoError(t, err)
|
||||
|
||||
chainConfig := ¶ms.ChainConfig{LondonBlock: big.NewInt(0), BernoulliBlock: big.NewInt(0), CurieBlock: big.NewInt(0), DarwinTime: new(uint64), DarwinV2Time: new(uint64), EuclidTime: new(uint64), EuclidV2Time: new(uint64)}
|
||||
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: 1,
|
||||
MaxL2GasPerChunk: math.MaxUint64,
|
||||
MaxL2GasPerChunk: 1152994, // One block per chunk via gas limit
|
||||
ChunkTimeoutSec: math.MaxUint32,
|
||||
MaxUncompressedBatchBytesSize: math.MaxUint64,
|
||||
}, encoding.CodecV7, chainConfig, db, nil)
|
||||
|
||||
@@ -54,7 +54,6 @@ type ChunkProposer struct {
|
||||
// NewChunkProposer creates a new ChunkProposer instance.
|
||||
func NewChunkProposer(ctx context.Context, cfg *config.ChunkProposerConfig, minCodecVersion encoding.CodecVersion, chainCfg *params.ChainConfig, db *gorm.DB, reg prometheus.Registerer) *ChunkProposer {
|
||||
log.Info("new chunk proposer",
|
||||
"maxBlockNumPerChunk", cfg.MaxBlockNumPerChunk,
|
||||
"maxL2GasPerChunk", cfg.MaxL2GasPerChunk,
|
||||
"chunkTimeoutSec", cfg.ChunkTimeoutSec,
|
||||
"maxBlobSize", maxBlobSize)
|
||||
@@ -232,10 +231,9 @@ func (p *ChunkProposer) ProposeChunk() error {
|
||||
return err
|
||||
}
|
||||
|
||||
maxBlocksThisChunk := p.cfg.MaxBlockNumPerChunk
|
||||
|
||||
// select at most maxBlocksThisChunk blocks
|
||||
blocks, err := p.l2BlockOrm.GetL2BlocksGEHeight(p.ctx, unchunkedBlockHeight, int(maxBlocksThisChunk))
|
||||
// select blocks without a hard per-chunk count limit (a large value is used in practice);
// the actual limits are enforced by the gas, timeout, and blob-size constraints
|
||||
blocks, err := p.l2BlockOrm.GetL2BlocksGEHeight(p.ctx, unchunkedBlockHeight, 1000)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -251,7 +249,7 @@ func (p *ChunkProposer) ProposeChunk() error {
|
||||
currentHardfork := encoding.GetHardforkName(p.chainCfg, blocks[i].Header.Number.Uint64(), blocks[i].Header.Time)
|
||||
if currentHardfork != hardforkName {
|
||||
blocks = blocks[:i]
|
||||
maxBlocksThisChunk = uint64(i) // update maxBlocksThisChunk to trigger chunking, because these blocks are the last blocks before the hardfork
|
||||
// Truncate blocks at hardfork boundary
|
||||
break
|
||||
}
|
||||
}
|
||||
@@ -324,8 +322,8 @@ func (p *ChunkProposer) ProposeChunk() error {
|
||||
}
|
||||
|
||||
currentTimeSec := uint64(time.Now().Unix())
|
||||
if metrics.FirstBlockTimestamp+p.cfg.ChunkTimeoutSec < currentTimeSec || metrics.NumBlocks == maxBlocksThisChunk {
|
||||
log.Info("reached maximum number of blocks in chunk or first block timeout",
|
||||
if metrics.FirstBlockTimestamp+p.cfg.ChunkTimeoutSec < currentTimeSec {
|
||||
log.Info("first block timeout reached",
|
||||
"block count", len(chunk.Blocks),
|
||||
"start block number", chunk.Blocks[0].Header.Number,
|
||||
"start block timestamp", metrics.FirstBlockTimestamp,
|
||||
|
||||
@@ -22,7 +22,6 @@ import (
|
||||
func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
maxBlockNum uint64
|
||||
maxL2Gas uint64
|
||||
chunkTimeoutSec uint64
|
||||
expectedChunksLen int
|
||||
@@ -30,14 +29,12 @@ func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "NoLimitReached",
|
||||
maxBlockNum: 100,
|
||||
maxL2Gas: 20_000_000,
|
||||
chunkTimeoutSec: 1000000000000,
|
||||
expectedChunksLen: 0,
|
||||
},
|
||||
{
|
||||
name: "Timeout",
|
||||
maxBlockNum: 100,
|
||||
maxL2Gas: 20_000_000,
|
||||
chunkTimeoutSec: 0,
|
||||
expectedChunksLen: 1,
|
||||
@@ -45,15 +42,13 @@ func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "MaxL2GasPerChunkIs0",
|
||||
maxBlockNum: 10,
|
||||
maxL2Gas: 0,
|
||||
chunkTimeoutSec: 1000000000000,
|
||||
expectedChunksLen: 0,
|
||||
},
|
||||
{
|
||||
name: "MaxBlockNumPerChunkIs1",
|
||||
maxBlockNum: 1,
|
||||
maxL2Gas: 20_000_000,
|
||||
name: "SingleBlockByGasLimit",
|
||||
maxL2Gas: 1_100_000,
|
||||
chunkTimeoutSec: 1000000000000,
|
||||
expectedChunksLen: 1,
|
||||
expectedBlocksInFirstChunk: 1,
|
||||
@@ -62,7 +57,6 @@ func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
// In this test the second block is not included in the chunk because together
|
||||
// with the first block it exceeds the maxL2GasPerChunk limit.
|
||||
name: "MaxL2GasPerChunkIsSecondBlock",
|
||||
maxBlockNum: 10,
|
||||
maxL2Gas: 1_153_000,
|
||||
chunkTimeoutSec: 1000000000000,
|
||||
expectedChunksLen: 1,
|
||||
@@ -85,7 +79,6 @@ func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: tt.maxBlockNum,
|
||||
MaxL2GasPerChunk: tt.maxL2Gas,
|
||||
ChunkTimeoutSec: tt.chunkTimeoutSec,
|
||||
MaxUncompressedBatchBytesSize: math.MaxUint64,
|
||||
@@ -110,53 +103,6 @@ func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func testChunkProposerBlobSizeLimitCodecV7(t *testing.T) {
|
||||
db := setupDB(t)
|
||||
defer database.CloseDB(db)
|
||||
block := readBlockFromJSON(t, "../../../testdata/blockTrace_03.json")
|
||||
for i := uint64(0); i < 510; i++ {
|
||||
l2BlockOrm := orm.NewL2Block(db)
|
||||
block.Header.Number = new(big.Int).SetUint64(i + 1)
|
||||
block.Header.Time = i + 1
|
||||
err := l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block})
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
// Add genesis chunk.
|
||||
chunkOrm := orm.NewChunk(db)
|
||||
_, err := chunkOrm.InsertChunk(context.Background(), &encoding.Chunk{Blocks: []*encoding.Block{{Header: &gethTypes.Header{Number: big.NewInt(0)}}}}, encoding.CodecV0, utils.ChunkMetrics{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
chainConfig := ¶ms.ChainConfig{LondonBlock: big.NewInt(0), BernoulliBlock: big.NewInt(0), CurieBlock: big.NewInt(0), DarwinTime: new(uint64), DarwinV2Time: new(uint64), EuclidTime: new(uint64), EuclidV2Time: new(uint64)}
|
||||
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: 255,
|
||||
MaxL2GasPerChunk: math.MaxUint64,
|
||||
ChunkTimeoutSec: math.MaxUint32,
|
||||
MaxUncompressedBatchBytesSize: math.MaxUint64,
|
||||
}, encoding.CodecV7, chainConfig, db, nil)
|
||||
|
||||
for i := 0; i < 2; i++ {
|
||||
cp.TryProposeChunk()
|
||||
}
|
||||
|
||||
chunkOrm = orm.NewChunk(db)
|
||||
chunks, err := chunkOrm.GetChunksGEIndex(context.Background(), 1, 0)
|
||||
assert.NoError(t, err)
|
||||
|
||||
var expectedNumChunks int = 2
|
||||
var numBlocksMultiplier uint64 = 255
|
||||
assert.Len(t, chunks, expectedNumChunks)
|
||||
|
||||
for i, chunk := range chunks {
|
||||
expected := numBlocksMultiplier * (uint64(i) + 1)
|
||||
if expected > 2000 {
|
||||
expected = 2000
|
||||
}
|
||||
assert.Equal(t, expected, chunk.EndBlockNumber)
|
||||
}
|
||||
}
|
||||
|
||||
func testChunkProposerUncompressedBatchBytesLimitCodecV8(t *testing.T) {
|
||||
db := setupDB(t)
|
||||
defer database.CloseDB(db)
|
||||
@@ -204,7 +150,6 @@ func testChunkProposerUncompressedBatchBytesLimitCodecV8(t *testing.T) {
|
||||
// Set max_uncompressed_batch_bytes_size to 4KiB (4 * 1024)
|
||||
// One block (~3KiB) should fit, but two blocks (~6KiB) should exceed the limit
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: math.MaxUint64, // No block number limit
|
||||
MaxL2GasPerChunk: math.MaxUint64, // No gas limit
|
||||
ChunkTimeoutSec: math.MaxUint32, // No timeout limit
|
||||
MaxUncompressedBatchBytesSize: 4 * 1024, // 4KiB limit
|
||||
|
||||
@@ -102,7 +102,6 @@ func TestFunction(t *testing.T) {
|
||||
|
||||
// Run chunk proposer test cases.
|
||||
t.Run("TestChunkProposerLimitsCodecV7", testChunkProposerLimitsCodecV7)
|
||||
t.Run("TestChunkProposerBlobSizeLimitCodecV7", testChunkProposerBlobSizeLimitCodecV7)
|
||||
t.Run("TestChunkProposerUncompressedBatchBytesLimitCodecV8", testChunkProposerUncompressedBatchBytesLimitCodecV8)
|
||||
|
||||
// Run batch proposer test cases.
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
"l2_config": {
|
||||
"endpoint": "https://rpc.scroll.io",
|
||||
"chunk_proposer_config": {
|
||||
"max_block_num_per_chunk": 100,
|
||||
"max_l2_gas_per_chunk": 20000000,
|
||||
"chunk_timeout_sec": 72000000000,
|
||||
"max_uncompressed_batch_bytes_size": 4194304
|
||||
|
||||
215
rollup/tests/integration_tool/imports.go
Normal file
@@ -0,0 +1,215 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"math/rand"
|
||||
"sort"
|
||||
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/scroll-tech/da-codec/encoding"
|
||||
"github.com/scroll-tech/go-ethereum/common"
|
||||
"github.com/scroll-tech/go-ethereum/log"
|
||||
|
||||
"scroll-tech/common/database"
|
||||
|
||||
"scroll-tech/rollup/internal/orm"
|
||||
"scroll-tech/rollup/internal/utils"
|
||||
)
|
||||
|
||||
type importRecord struct {
|
||||
Chunk []string `json:"chunks"`
|
||||
Batch []string `json:"batches"`
|
||||
Bundle []string `json:"bundles"`
|
||||
}
|
||||
|
||||
func randomPickKfromN(n, k int, rng *rand.Rand) []int {
|
||||
ret := make([]int, n-1)
|
||||
for i := 1; i < n; i++ {
|
||||
ret[i-1] = i
|
||||
}
|
||||
|
||||
rng.Shuffle(len(ret), func(i, j int) {
|
||||
ret[i], ret[j] = ret[j], ret[i]
|
||||
})
|
||||
|
||||
ret = ret[:k-1]
|
||||
sort.Ints(ret)
|
||||
|
||||
return ret
|
||||
}
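// Note (behavior sketch, derived from the call sites in importData below): randomPickKfromN(n, k, rng)
// returns k-1 distinct, sorted cut points drawn from [1, n); the caller appends the exclusive
// upper bound itself, so n consecutive items end up split into k contiguous, non-empty ranges
// (blocks into chunks, chunks into batches, batches into bundles).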
|
||||
func importData(ctx context.Context, beginBlk, endBlk uint64, chkNum, batchNum, bundleNum int, seed int64) (*importRecord, error) {
|
||||
|
||||
db, err := database.InitDB(cfg.DBConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret := &importRecord{}
|
||||
// Create a new random source with the provided seed
|
||||
source := rand.NewSource(seed)
|
||||
//nolint:all
|
||||
rng := rand.New(source)
|
||||
|
||||
chkSepIdx := randomPickKfromN(int(endBlk-beginBlk)+1, chkNum, rng)
|
||||
chkSep := make([]uint64, len(chkSepIdx))
|
||||
for i, ind := range chkSepIdx {
|
||||
chkSep[i] = beginBlk + uint64(ind)
|
||||
}
|
||||
chkSep = append(chkSep, endBlk+1)
|
||||
|
||||
log.Info("separated chunk", "border", chkSep)
|
||||
head := beginBlk
|
||||
lastMsgHash := common.Hash{}
|
||||
|
||||
ormChks := make([]*orm.Chunk, 0, chkNum)
|
||||
encChks := make([]*encoding.Chunk, 0, chkNum)
|
||||
for _, edBlk := range chkSep {
|
||||
ormChk, chk, err := importChunk(ctx, db, head, edBlk-1, lastMsgHash)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
lastMsgHash = chk.PostL1MessageQueueHash
|
||||
ormChks = append(ormChks, ormChk)
|
||||
encChks = append(encChks, chk)
|
||||
head = edBlk
|
||||
}
|
||||
|
||||
for _, chk := range ormChks {
|
||||
ret.Chunk = append(ret.Chunk, chk.Hash)
|
||||
}
|
||||
|
||||
batchSep := randomPickKfromN(chkNum, batchNum, rng)
|
||||
batchSep = append(batchSep, chkNum)
|
||||
log.Info("separated batch", "border", batchSep)
|
||||
|
||||
headChk := int(0)
|
||||
batches := make([]*orm.Batch, 0, batchNum)
|
||||
var lastBatch *orm.Batch
|
||||
for _, endChk := range batchSep {
|
||||
batch, err := importBatch(ctx, db, ormChks[headChk:endChk], encChks[headChk:endChk], lastBatch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
lastBatch = batch
|
||||
batches = append(batches, batch)
|
||||
headChk = endChk
|
||||
}
|
||||
|
||||
for _, batch := range batches {
|
||||
ret.Batch = append(ret.Batch, batch.Hash)
|
||||
}
|
||||
|
||||
bundleSep := randomPickKfromN(batchNum, bundleNum, rng)
|
||||
bundleSep = append(bundleSep, batchNum)
|
||||
log.Info("separated bundle", "border", bundleSep)
|
||||
|
||||
headBatch := int(0)
|
||||
for _, endBatch := range bundleSep {
|
||||
hash, err := importBundle(ctx, db, batches[headBatch:endBatch])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Bundle = append(ret.Bundle, hash)
|
||||
headBatch = endBatch
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func importChunk(ctx context.Context, db *gorm.DB, beginBlk, endBlk uint64, prevMsgQueueHash common.Hash) (*orm.Chunk, *encoding.Chunk, error) {
	nblk := int(endBlk-beginBlk) + 1
	blockOrm := orm.NewL2Block(db)

	blks, err := blockOrm.GetL2BlocksGEHeight(ctx, beginBlk, nblk)

	if err != nil {
		return nil, nil, err
	}

	postHash, err := encoding.MessageQueueV2ApplyL1MessagesFromBlocks(prevMsgQueueHash, blks)
	if err != nil {
		return nil, nil, err
	}

	theChunk := &encoding.Chunk{
		Blocks:                 blks,
		PrevL1MessageQueueHash: prevMsgQueueHash,
		PostL1MessageQueueHash: postHash,
	}
	chunkOrm := orm.NewChunk(db)

	dbChk, err := chunkOrm.InsertChunk(ctx, theChunk, codecCfg, utils.ChunkMetrics{})
	if err != nil {
		return nil, nil, err
	}
	err = blockOrm.UpdateChunkHashInRange(ctx, beginBlk, endBlk, dbChk.Hash)
	if err != nil {
		return nil, nil, err
	}
	log.Info("insert chunk", "From", beginBlk, "To", endBlk, "hash", dbChk.Hash)
	return dbChk, theChunk, nil
}

func importBatch(ctx context.Context, db *gorm.DB, chks []*orm.Chunk, encChks []*encoding.Chunk, last *orm.Batch) (*orm.Batch, error) {

	batchOrm := orm.NewBatch(db)
	if last == nil {
		var err error
		last, err = batchOrm.GetLatestBatch(ctx)
		if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) {
			return nil, err
		} else if last != nil {
			log.Info("start from last batch", "index", last.Index)
		}
	}

	index := uint64(0)
	var parentHash common.Hash
	if last != nil {
		index = last.Index + 1
		parentHash = common.HexToHash(last.Hash)
	}

	var blks []*encoding.Block
	for _, chk := range encChks {
		blks = append(blks, chk.Blocks...)
	}

	batch := &encoding.Batch{
		Index:                      index,
		TotalL1MessagePoppedBefore: chks[0].TotalL1MessagesPoppedBefore,
		ParentBatchHash:            parentHash,
		Chunks:                     encChks,
		Blocks:                     blks,
	}

	dbBatch, err := batchOrm.InsertBatch(ctx, batch, codecCfg, utils.BatchMetrics{})
	if err != nil {
		return nil, err
	}
	err = orm.NewChunk(db).UpdateBatchHashInRange(ctx, chks[0].Index, chks[len(chks)-1].Index, dbBatch.Hash)
	if err != nil {
		return nil, err
	}

	log.Info("insert batch", "index", index)
	return dbBatch, nil
}

func importBundle(ctx context.Context, db *gorm.DB, batches []*orm.Batch) (string, error) {

	bundleOrm := orm.NewBundle(db)
	bundle, err := bundleOrm.InsertBundle(ctx, batches, codecCfg)
	if err != nil {
		return "", err
	}
	err = orm.NewBatch(db).UpdateBundleHashInRange(ctx, batches[0].Index, batches[len(batches)-1].Index, bundle.Hash)
	if err != nil {
		return "", err
	}

	log.Info("insert bundle", "hash", bundle.Hash)
	return bundle.Hash, nil
}
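
The hashes collected above are what the tool writes out as its test set. The concrete result type is declared earlier in this change and is not shown here; judging from the fields populated above, it is assumed to look roughly like the following sketch (field and JSON tag names are hypothetical):

    // Hypothetical shape of the value importData returns and main.go marshals
    // into testset.json: one hash string per imported chunk, batch and bundle.
    type testSetSketch struct {
        Chunk  []string `json:"chunk"`
        Batch  []string `json:"batch"`
        Bundle []string `json:"bundle"`
    }
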
198
rollup/tests/integration_tool/main.go
Normal file
@@ -0,0 +1,198 @@
package main

import (
	"encoding/json"
	"fmt"
	"math/rand"
	"os"
	"path/filepath"
	"strconv"
	"strings"

	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/go-ethereum/log"
	"github.com/urfave/cli/v2"

	"scroll-tech/common/database"
	"scroll-tech/common/utils"
	"scroll-tech/common/version"
)

var app *cli.App
var cfg *config
var codecCfg encoding.CodecVersion = encoding.CodecV8

var outputNumFlag = cli.StringFlag{
	Name:  "counts",
	Usage: "Counts for output (chunks,batches,bundles)",
	Value: "4,2,1",
}

var outputPathFlag = cli.StringFlag{
	Name:  "output",
	Usage: "output file path",
	Value: "testset.json",
}

var seedFlag = cli.Int64Flag{
	Name:  "seed",
	Usage: "random seed, 0 to use random selected seed",
	Value: 0,
}

var codecFlag = cli.IntFlag{
	Name:  "codec",
	Usage: "codec version, valid from 6, default(auto) is 0",
	Value: 0,
}

func parseThreeIntegers(value string) (int, int, int, error) {
	// Split the input string by comma
	parts := strings.Split(value, ",")

	// Check that we have exactly 3 parts
	if len(parts) != 3 {
		return 0, 0, 0, fmt.Errorf("input must contain exactly 3 comma-separated integers, got %s", value)
	}

	// Parse the three integers
	values := make([]int, 3)
	for i, part := range parts {
		// Trim any whitespace
		part = strings.TrimSpace(part)

		// Parse the integer
		val, err := strconv.Atoi(part)
		if err != nil {
			return 0, 0, 0, fmt.Errorf("failed to parse '%s' as integer: %w", part, err)
		}

		// Check that it's positive
		if val <= 0 {
			return 0, 0, 0, fmt.Errorf("all integers must be greater than 0, got %d", val)
		}

		values[i] = val
	}

	// Check that first >= second >= third
	if values[0] < values[1] || values[1] < values[2] {
		return 0, 0, 0, fmt.Errorf("integers must be in descending order: %d >= %d >= %d",
			values[0], values[1], values[2])
	}

	return values[0], values[1], values[2], nil
}
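
// Example (editor's illustration): parseThreeIntegers("4,2,1") returns
// (4, 2, 1, nil), while parseThreeIntegers("2,4,1") returns an error because
// the counts must be non-increasing: chunks >= batches >= bundles.
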
// load a compatible type of config for rollup
type config struct {
	DBConfig *database.Config `json:"db_config"`
}

func init() {
	// Set up the integration test tool app info.
	app = cli.NewApp()
	app.Action = action
	app.Name = "integration-test-tool"
	app.Usage = "The Scroll L2 Integration Test Tool"
	app.Version = version.Version
	app.Flags = append(app.Flags, &codecFlag, &seedFlag, &outputNumFlag, &outputPathFlag)
	app.Flags = append(app.Flags, utils.CommonFlags...)
	app.Before = func(ctx *cli.Context) error {
		if err := utils.LogSetup(ctx); err != nil {
			return err
		}

		cfgFile := ctx.String(utils.ConfigFileFlag.Name)
		var err error
		cfg, err = newConfig(cfgFile)
		if err != nil {
			log.Crit("failed to load config file", "config file", cfgFile, "error", err)
		}
		return nil
	}
}

func newConfig(file string) (*config, error) {
	buf, err := os.ReadFile(filepath.Clean(file))
	if err != nil {
		return nil, err
	}

	cfg := &config{}
	err = json.Unmarshal(buf, cfg)
	if err != nil {
		return nil, err
	}

	return cfg, nil
}

func action(ctx *cli.Context) error {

	if ctx.Args().Len() < 2 {
		return fmt.Errorf("specify begin and end block number")
	}

	codecFl := ctx.Int(codecFlag.Name)
	if codecFl != 0 {
		switch codecFl {
		case 6:
			codecCfg = encoding.CodecV6
		case 7:
			codecCfg = encoding.CodecV7
		case 8:
			codecCfg = encoding.CodecV8
		default:
			return fmt.Errorf("invalid codec version %d", codecFl)
		}
		log.Info("set codec", "version", codecCfg)
	}

	beginBlk, err := strconv.ParseUint(ctx.Args().First(), 10, 64)
	if err != nil {
		return fmt.Errorf("invalid begin block number: %w", err)
	}
	endBlk, err := strconv.ParseUint(ctx.Args().Get(1), 10, 64)
	if err != nil {
		return fmt.Errorf("invalid end block number: %w", err)
	}

	chkNum, batchNum, bundleNum, err := parseThreeIntegers(ctx.String(outputNumFlag.Name))
	if err != nil {
		return err
	}

	seed := ctx.Int64(seedFlag.Name)
	//nolint:all
	if seed == 0 {
		seed = rand.Int63()
	}

	outputPath := ctx.String(outputPathFlag.Name)
	log.Info("output", "Seed", seed, "file", outputPath)
	ret, err := importData(ctx.Context, beginBlk, endBlk, chkNum, batchNum, bundleNum, seed)
	if err != nil {
		return err
	}
	// Marshal the ret variable to JSON with indentation for readability
	jsonData, err := json.MarshalIndent(ret, "", " ")
	if err != nil {
		return fmt.Errorf("failed to marshal result data to JSON: %w", err)
	}

	// Write the JSON data to the specified file
	err = os.WriteFile(outputPath, jsonData, 0600)
	if err != nil {
		return fmt.Errorf("failed to write result to file %s: %w", outputPath, err)
	}

	return nil
}

func main() {
	if err := app.Run(os.Args); err != nil {
		_, _ = fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
@@ -118,7 +118,6 @@ func testCommitBatchAndFinalizeBundleCodecV7(t *testing.T) {
	}

	cp := watcher.NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
		MaxBlockNumPerChunk:           100,
		MaxL2GasPerChunk:              math.MaxUint64,
		ChunkTimeoutSec:               300,
		MaxUncompressedBatchBytesSize: math.MaxUint64,
@@ -1,3 +1,4 @@
[toolchain]
channel = "nightly-2025-02-14"
targets = ["riscv32im-unknown-none-elf", "x86_64-unknown-linux-gnu"]
channel = "nightly-2025-08-18"
targets = ["riscv32im-unknown-none-elf", "x86_64-unknown-linux-gnu"]
components = ["llvm-tools", "rustc-dev"]
@@ -5,7 +5,7 @@ go 1.22
toolchain go1.22.2

require (
	github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6
	github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178
	github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587
	github.com/stretchr/testify v1.10.0
	gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde
@@ -93,8 +93,8 @@ github.com/rjeczalik/notify v0.9.1 h1:CLCKso/QK1snAlnhNR/CNvNiFU2saUtjV0bx3EwNeC
github.com/rjeczalik/notify v0.9.1/go.mod h1:rKwnCoCGeuQnwBtTSPL9Dad03Vh2n40ePRrjvIXnJho=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6 h1:EiLZgoRfGgF0QzZ1jGSlo60A6W7g9L/8XLWMhwwtKJ0=
github.com/scroll-tech/da-codec v0.1.3-0.20250825071838-cddc263e5ef6/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 h1:4utngmJHXSOS5FoSdZhEV1xMRirpArbXvyoCZY9nYj0=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587 h1:wG1+gb+K4iLtxAHhiAreMdIjP5x9hB64duraN2+u1QU=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587/go.mod h1:YyfB2AyAtphlbIuDQgaxc2b9mo0zE4EBA1+qtXvzlmg=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
3
tests/prover-e2e/.env
Normal file
@@ -0,0 +1,3 @@
GOOSE_DRIVER=postgres
GOOSE_DBSTRING=postgresql://dev:dev@localhost:5432/scroll?sslmode=disable
GOOSE_MIGRATION_DIR=../../database/migrate/migrations
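
GOOSE_DBSTRING points goose at the same local development database the rollup-side tools write to. As a rough illustration only (this helper is not part of the change, and it assumes the gorm Postgres driver, gorm.io/driver/postgres), opening that database from Go could look like:

    package main

    import (
        "gorm.io/driver/postgres"
        "gorm.io/gorm"
    )

    // openScrollDevDB opens the local dev database using the same DSN as
    // GOOSE_DBSTRING in tests/prover-e2e/.env.
    func openScrollDevDB() (*gorm.DB, error) {
        dsn := "postgresql://dev:dev@localhost:5432/scroll?sslmode=disable"
        return gorm.Open(postgres.Open(dsn), &gorm.Config{})
    }
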
2
tests/prover-e2e/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
build/*
testset.json
132
tests/prover-e2e/00100_import_blocks.sql
Normal file
@@ -0,0 +1,132 @@
-- +goose Up
-- +goose StatementBegin
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973700', '0x29c84f0df09fda2c6c63d314bb6714dbbdeca3b85c91743f9e25d3f81c28b986', '0x01aabb5d1d7edadd10011b4099de7ed703b9ce495717cd48a304ff4db3710d8a', '{"parentHash":"0x01aabb5d1d7edadd10011b4099de7ed703b9ce495717cd48a304ff4db3710d8a","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77204","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36e5","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x29c84f0df09fda2c6c63d314bb6714dbbdeca3b85c91743f9e25d3f81c28b986"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167589', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973701', '0x21ad5215b5b51cb5eb5ea6a19444804ca1628cbf8ef8cf1977660d8c468c0151', '0x29c84f0df09fda2c6c63d314bb6714dbbdeca3b85c91743f9e25d3f81c28b986', '{"parentHash":"0x29c84f0df09fda2c6c63d314bb6714dbbdeca3b85c91743f9e25d3f81c28b986","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77205","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36e6","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x21ad5215b5b51cb5eb5ea6a19444804ca1628cbf8ef8cf1977660d8c468c0151"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167590', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973702', '0x8c9ce33a9b62060b193c01f31518f3bfc5d8132c11569a5b4db03a5b0611f30e', '0x21ad5215b5b51cb5eb5ea6a19444804ca1628cbf8ef8cf1977660d8c468c0151', '{"parentHash":"0x21ad5215b5b51cb5eb5ea6a19444804ca1628cbf8ef8cf1977660d8c468c0151","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77206","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36e7","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x8c9ce33a9b62060b193c01f31518f3bfc5d8132c11569a5b4db03a5b0611f30e"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167591', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973703', '0x73eed8060ca9a36fe8bf8c981f0e44425cd69ade00ff986452f1c02d462194fe', '0x8c9ce33a9b62060b193c01f31518f3bfc5d8132c11569a5b4db03a5b0611f30e', '{"parentHash":"0x8c9ce33a9b62060b193c01f31518f3bfc5d8132c11569a5b4db03a5b0611f30e","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77207","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36e8","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x73eed8060ca9a36fe8bf8c981f0e44425cd69ade00ff986452f1c02d462194fe"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167592', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973704', '0x7a6a1bede8936cfbd677cf38c43e399c8a2d0b62700caf05513bad540541b1b5', '0x73eed8060ca9a36fe8bf8c981f0e44425cd69ade00ff986452f1c02d462194fe', '{"parentHash":"0x73eed8060ca9a36fe8bf8c981f0e44425cd69ade00ff986452f1c02d462194fe","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77208","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36e9","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x7a6a1bede8936cfbd677cf38c43e399c8a2d0b62700caf05513bad540541b1b5"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167593', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973705', '0x1a5306293b7801a42c4402f9d32e7a45d640c49506ee61452da0120f3e242424', '0x7a6a1bede8936cfbd677cf38c43e399c8a2d0b62700caf05513bad540541b1b5', '{"parentHash":"0x7a6a1bede8936cfbd677cf38c43e399c8a2d0b62700caf05513bad540541b1b5","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77209","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36ea","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x1a5306293b7801a42c4402f9d32e7a45d640c49506ee61452da0120f3e242424"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167594', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973706', '0xc89f391a03c7138f676bd8babed6589035b2b7d8c8b99071cb90661f7f996386', '0x1a5306293b7801a42c4402f9d32e7a45d640c49506ee61452da0120f3e242424', '{"parentHash":"0x1a5306293b7801a42c4402f9d32e7a45d640c49506ee61452da0120f3e242424","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7720a","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36eb","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xc89f391a03c7138f676bd8babed6589035b2b7d8c8b99071cb90661f7f996386"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167595', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973707', '0x38d72b14ef43e548ab0ffd84892e7c3accdd11e1121c2c7a94b953b4e896eb41', '0xc89f391a03c7138f676bd8babed6589035b2b7d8c8b99071cb90661f7f996386', '{"parentHash":"0xc89f391a03c7138f676bd8babed6589035b2b7d8c8b99071cb90661f7f996386","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946","transactionsRoot":"0xf14cf5134833ddb4f42a017e92af371f0a71eaf5d84cb6e681c81fa023662c5d","receiptsRoot":"0x4008fb883088f1ba377310e15221fffc8e5446faf420d6a28e061e9341beb056","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000900000000000000000000000000000000000000000000000000000000000000000000000001000000008000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000020000000000000000000","difficulty":"0x1","number":"0xa7720b","gasLimit":"0x1312d00","gasUsed":"0x9642","timestamp":"0x687f36ec","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x38d72b14ef43e548ab0ffd84892e7c3accdd11e1121c2c7a94b953b4e896eb41"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946', '1', '38466', '1753167596', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[{"type":2,"nonce":3392500,"txHash":"0xc15b615906602154131a6c42d7603def4bd2a769881292d831140b0b9b8f8850","gas":45919,"gasPrice":"0x1de8476","gasTipCap":"0x64","gasFeeCap":"0x1de8476","from":"0x0000000000000000000000000000000000000000","to":"0x5300000000000000000000000000000000000002","chainId":"0x8274f","value":"0x0","data":"0x39455d3a0000000000000000000000000000000000000000000000000000000000045b840000000000000000000000000000000000000000000000000000000000000001","isCreate":false,"accessList":[{"address":"0x5300000000000000000000000000000000000003","storageKeys":["0x297c59f20c6b2556a4ed35dccabbdeb8b1cf950f62aefb86b98d19b5a4aff2a2"]}],"authorizationList":null,"v":"0x1","r":"0xa1b888cc9be7990c4f6bd8a9d0d5fa743ea8173196c7ca871464becd133ba0de","s":"0x6bacc3e1a244c62eff3008795e010598d07b95a8bad7a5592ec941e121294885"}]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973708', '0x230863f98595ba0c83785caf618072ce2a876307102adbeba11b9de9c4af8a08', '0x38d72b14ef43e548ab0ffd84892e7c3accdd11e1121c2c7a94b953b4e896eb41', '{"parentHash":"0x38d72b14ef43e548ab0ffd84892e7c3accdd11e1121c2c7a94b953b4e896eb41","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7720c","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36ed","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x230863f98595ba0c83785caf618072ce2a876307102adbeba11b9de9c4af8a08"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946', '0', '0', '1753167597', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973709', '0xae4af19a7697c2bb10a641f07269ed7df66775d56414567245adc98befdae557', '0x230863f98595ba0c83785caf618072ce2a876307102adbeba11b9de9c4af8a08', '{"parentHash":"0x230863f98595ba0c83785caf618072ce2a876307102adbeba11b9de9c4af8a08","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7720d","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36ee","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xae4af19a7697c2bb10a641f07269ed7df66775d56414567245adc98befdae557"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946', '0', '0', '1753167598', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973710', '0xd2bc7e24b66940a767abaee485870e9ebd24ff28e72a3096cdccc55b85f84182', '0xae4af19a7697c2bb10a641f07269ed7df66775d56414567245adc98befdae557', '{"parentHash":"0xae4af19a7697c2bb10a641f07269ed7df66775d56414567245adc98befdae557","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7720e","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36ef","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xd2bc7e24b66940a767abaee485870e9ebd24ff28e72a3096cdccc55b85f84182"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946', '0', '0', '1753167599', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973711', '0x1870b94320db154de4702fe6bfb69ebc98fb531b78bf81a69b8ab658ba9d9af5', '0xd2bc7e24b66940a767abaee485870e9ebd24ff28e72a3096cdccc55b85f84182', '{"parentHash":"0xd2bc7e24b66940a767abaee485870e9ebd24ff28e72a3096cdccc55b85f84182","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x2bdf2906a7bbb398419246c3c77804a204641259b2aeb4f4a806eb772d31c480","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7720f","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f0","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x1870b94320db154de4702fe6bfb69ebc98fb531b78bf81a69b8ab658ba9d9af5"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x2bdf2906a7bbb398419246c3c77804a204641259b2aeb4f4a806eb772d31c480', '0', '0', '1753167600', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973712', '0x271745e26e3222352fce7052edef9adecba921f4315e48a9d55e46640ac324ce', '0x1870b94320db154de4702fe6bfb69ebc98fb531b78bf81a69b8ab658ba9d9af5', '{"parentHash":"0x1870b94320db154de4702fe6bfb69ebc98fb531b78bf81a69b8ab658ba9d9af5","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x1ec0026bd12fe29d710e5f04e605cdb715d68a2e5bac57416066a7bc6b298762","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77210","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f1","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4208","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x271745e26e3222352fce7052edef9adecba921f4315e48a9d55e46640ac324ce"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x1ec0026bd12fe29d710e5f04e605cdb715d68a2e5bac57416066a7bc6b298762', '0', '0', '1753167601', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973713', '0xe193fc4298a6beebbc59b83cc7e2bbdace76c24fe9b7bd76aa415159ecb60914', '0x271745e26e3222352fce7052edef9adecba921f4315e48a9d55e46640ac324ce', '{"parentHash":"0x271745e26e3222352fce7052edef9adecba921f4315e48a9d55e46640ac324ce","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x7e29363a63f54a0e03e08cb515a98f3c416a5ade3ec15d29eddd262baf67a2a1","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77211","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f2","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xe193fc4298a6beebbc59b83cc7e2bbdace76c24fe9b7bd76aa415159ecb60914"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x7e29363a63f54a0e03e08cb515a98f3c416a5ade3ec15d29eddd262baf67a2a1', '0', '0', '1753167602', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973714', '0x8e99d9242315a107492520e9255dd77798adbff81393d3de83960dac361bd838', '0xe193fc4298a6beebbc59b83cc7e2bbdace76c24fe9b7bd76aa415159ecb60914', '{"parentHash":"0xe193fc4298a6beebbc59b83cc7e2bbdace76c24fe9b7bd76aa415159ecb60914","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xd818ac5028fe5aa1abd2d3ffe4693b4e96eabad35e49011e2ce920bcd76d061a","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77212","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f3","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x8e99d9242315a107492520e9255dd77798adbff81393d3de83960dac361bd838"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xd818ac5028fe5aa1abd2d3ffe4693b4e96eabad35e49011e2ce920bcd76d061a', '0', '0', '1753167603', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973715', '0x9ee9d75a25a912e7d3907679a1e588021f4d2040c71d2e1c958538466a4fbbd6', '0x8e99d9242315a107492520e9255dd77798adbff81393d3de83960dac361bd838', '{"parentHash":"0x8e99d9242315a107492520e9255dd77798adbff81393d3de83960dac361bd838","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x233fd85bd753e126e4df23a05c56ccde3eb6ec06ce2565a990af3347dc95b0c5","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77213","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f4","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x9ee9d75a25a912e7d3907679a1e588021f4d2040c71d2e1c958538466a4fbbd6"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x233fd85bd753e126e4df23a05c56ccde3eb6ec06ce2565a990af3347dc95b0c5', '0', '0', '1753167604', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973716', '0x9b25094db21166930008728c487ca9dbbc1e842701c8573eaa6bea4d41c10a7e', '0x9ee9d75a25a912e7d3907679a1e588021f4d2040c71d2e1c958538466a4fbbd6', '{"parentHash":"0x9ee9d75a25a912e7d3907679a1e588021f4d2040c71d2e1c958538466a4fbbd6","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x12be357fcc1fc28e574a7f95a5f9b3aae7e18d8ab8829c676478b4e8953a8502","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77214","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f5","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x9b25094db21166930008728c487ca9dbbc1e842701c8573eaa6bea4d41c10a7e"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x12be357fcc1fc28e574a7f95a5f9b3aae7e18d8ab8829c676478b4e8953a8502', '0', '0', '1753167605', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973717', '0xeaab0e07b40720f8e961f28ef665f08e67797428dc1eaccba88d5d4f60341284', '0x9b25094db21166930008728c487ca9dbbc1e842701c8573eaa6bea4d41c10a7e', '{"parentHash":"0x9b25094db21166930008728c487ca9dbbc1e842701c8573eaa6bea4d41c10a7e","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x7e49dc33343a54e9afc285155b8a35575e6924d465fe2dc543b5ea8915eb828a","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77215","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f6","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xeaab0e07b40720f8e961f28ef665f08e67797428dc1eaccba88d5d4f60341284"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x7e49dc33343a54e9afc285155b8a35575e6924d465fe2dc543b5ea8915eb828a', '0', '0', '1753167606', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973718', '0xd6af3c7bf29f3689b516ed5c8fcf885a4e7eb2df751c35d4ebbb19fcc13628d4', '0xeaab0e07b40720f8e961f28ef665f08e67797428dc1eaccba88d5d4f60341284', '{"parentHash":"0xeaab0e07b40720f8e961f28ef665f08e67797428dc1eaccba88d5d4f60341284","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xf1a7db2e4f463fa87e3e65b73d2abc5374302855f6af9735d5a11c94c2d93975","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77216","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f7","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xd6af3c7bf29f3689b516ed5c8fcf885a4e7eb2df751c35d4ebbb19fcc13628d4"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xf1a7db2e4f463fa87e3e65b73d2abc5374302855f6af9735d5a11c94c2d93975', '0', '0', '1753167607', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973719', '0x5815f5b91d53d0b5c7d423f06da7cad3d45edeab1c2b590af02ceebfd33b2ce1', '0xd6af3c7bf29f3689b516ed5c8fcf885a4e7eb2df751c35d4ebbb19fcc13628d4', '{"parentHash":"0xd6af3c7bf29f3689b516ed5c8fcf885a4e7eb2df751c35d4ebbb19fcc13628d4","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb5d1f420ddc1edb60c7fc3a06929a2014c548d1ddd52a78ab6984faed53a09d1","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77217","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f8","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x5815f5b91d53d0b5c7d423f06da7cad3d45edeab1c2b590af02ceebfd33b2ce1"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb5d1f420ddc1edb60c7fc3a06929a2014c548d1ddd52a78ab6984faed53a09d1', '0', '0', '1753167608', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973720', '0x4d8bbd6a15515cacf18cf9810ca3867442cefc733e9cdeaa1527a008fbca3bd1', '0x5815f5b91d53d0b5c7d423f06da7cad3d45edeab1c2b590af02ceebfd33b2ce1', '{"parentHash":"0x5815f5b91d53d0b5c7d423f06da7cad3d45edeab1c2b590af02ceebfd33b2ce1","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xf4ca7c941e6ad6a780ad8422a817c6a7916f3f80b5f0d0f95cabcb17b0531299","transactionsRoot":"0x79c3ba4e0fe89ddea0ed8becdbfff86f18dab3ffd21eaf13744b86cb104d664e","receiptsRoot":"0xc8f88931c3c4ca18cb582e490d7acabfbe04fd6fa971549af6bf927aec7bfa1f","logsBloom":"0x00000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000200000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77218","gasLimit":"0x1312d00","gasUsed":"0x7623","timestamp":"0x687f36f9","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x4d8bbd6a15515cacf18cf9810ca3867442cefc733e9cdeaa1527a008fbca3bd1"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xf4ca7c941e6ad6a780ad8422a817c6a7916f3f80b5f0d0f95cabcb17b0531299', '1', '30243', '1753167609', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[{"type":0,"nonce":13627,"txHash":"0x539962b9584723f919b9f3a0b454622f5f51c195300564116d0cedfec17a1381","gas":30243,"gasPrice":"0xef426b","gasTipCap":"0xef426b","gasFeeCap":"0xef426b","from":"0x0000000000000000000000000000000000000000","to":"0xf07cc6482a24843efe7b42259acbaf8d0a2a6952","chainId":"0x8274f","value":"0x0","data":"0x91b7f5ed0000000000000000000000000000000000000000000018f4c5be1c1407000000","isCreate":false,"accessList":null,"authorizationList":null,"v":"0x104ec2","r":"0xaa309d7e218825160be9a87c9e50d3cbfead9c87e90e984ad0ea2441633092a2","s":"0x438f39c0af058794f320e5578720557af07c5397e363f9628a6c4ffee5bd2487"}]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
    state_root, tx_num, gas_used, block_timestamp, row_consumption,
    chunk_hash, transactions
) VALUES ('10973721', '0x84c53d922cfe0558c4be02865af5ebd49efe44a458dabc16aea5584e5e06f346', '0x4d8bbd6a15515cacf18cf9810ca3867442cefc733e9cdeaa1527a008fbca3bd1', '{"parentHash":"0x4d8bbd6a15515cacf18cf9810ca3867442cefc733e9cdeaa1527a008fbca3bd1","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xd4838ba86f5a8e865a41ef7547148b6074235a658dd57ff2296c0badda4760d1","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77219","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36fa","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x84c53d922cfe0558c4be02865af5ebd49efe44a458dabc16aea5584e5e06f346"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xd4838ba86f5a8e865a41ef7547148b6074235a658dd57ff2296c0badda4760d1', '0', '0', '1753167610', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973722', '0xe1d601522b08d98852b4c7dc3584f292ac246a3dac3c600ba58bd6c20c97be5b', '0x84c53d922cfe0558c4be02865af5ebd49efe44a458dabc16aea5584e5e06f346', '{"parentHash":"0x84c53d922cfe0558c4be02865af5ebd49efe44a458dabc16aea5584e5e06f346","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x8deede75e20423d0495cbdb493d320dddde6df0459df998608a16f658eb7bec3","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7721a","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36fb","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xe1d601522b08d98852b4c7dc3584f292ac246a3dac3c600ba58bd6c20c97be5b"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x8deede75e20423d0495cbdb493d320dddde6df0459df998608a16f658eb7bec3', '0', '0', '1753167611', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973723', '0x8579712fc434b401f1ecfcf3ae22611be054480fa882e90f8eecb6c5e97534bd', '0xe1d601522b08d98852b4c7dc3584f292ac246a3dac3c600ba58bd6c20c97be5b', '{"parentHash":"0xe1d601522b08d98852b4c7dc3584f292ac246a3dac3c600ba58bd6c20c97be5b","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb4fe51cda0401bb19e8448a2697a49e1fbc25398c2b18a9955d0a8e6f4b153a7","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7721b","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36fc","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x8579712fc434b401f1ecfcf3ae22611be054480fa882e90f8eecb6c5e97534bd"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb4fe51cda0401bb19e8448a2697a49e1fbc25398c2b18a9955d0a8e6f4b153a7', '0', '0', '1753167612', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973724', '0xe13a0b907e044a9df1952acc31dc08a578fb910a0cc224e11692cb84c9c9a9f7', '0x8579712fc434b401f1ecfcf3ae22611be054480fa882e90f8eecb6c5e97534bd', '{"parentHash":"0x8579712fc434b401f1ecfcf3ae22611be054480fa882e90f8eecb6c5e97534bd","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xcd17c85290d8ec7473357ebe1605f766af6c1356732cc7ad11de0453baca05c6","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7721c","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36fd","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xe13a0b907e044a9df1952acc31dc08a578fb910a0cc224e11692cb84c9c9a9f7"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xcd17c85290d8ec7473357ebe1605f766af6c1356732cc7ad11de0453baca05c6', '0', '0', '1753167613', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973725', '0x2e26fb489f8644b3b5c44cd493ebc140ba3bc716588f37a71b8ba6dc504ccb5f', '0xe13a0b907e044a9df1952acc31dc08a578fb910a0cc224e11692cb84c9c9a9f7', '{"parentHash":"0xe13a0b907e044a9df1952acc31dc08a578fb910a0cc224e11692cb84c9c9a9f7","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xfd321f4a3e2bc757df89162f730a2e37519dcb29cdb63019665c1fe4dbceeb00","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7721d","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36fe","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x2e26fb489f8644b3b5c44cd493ebc140ba3bc716588f37a71b8ba6dc504ccb5f"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xfd321f4a3e2bc757df89162f730a2e37519dcb29cdb63019665c1fe4dbceeb00', '0', '0', '1753167614', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973726', '0x313b0fbb7cbb8bc1ba4fbc50684b516d31f9f7ee6f66d919da01328537a4b0a1', '0x2e26fb489f8644b3b5c44cd493ebc140ba3bc716588f37a71b8ba6dc504ccb5f', '{"parentHash":"0x2e26fb489f8644b3b5c44cd493ebc140ba3bc716588f37a71b8ba6dc504ccb5f","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x1a24ed5ee5e8ca354f583b28bd7f2c4c6fe4dca59fef476578eddab17b857471","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7721e","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36ff","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x313b0fbb7cbb8bc1ba4fbc50684b516d31f9f7ee6f66d919da01328537a4b0a1"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x1a24ed5ee5e8ca354f583b28bd7f2c4c6fe4dca59fef476578eddab17b857471', '0', '0', '1753167615', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973727', '0xf9039c9c24ab919066f2eb6f97360cfb727ed032c9e6142ea45e784b19894560', '0x313b0fbb7cbb8bc1ba4fbc50684b516d31f9f7ee6f66d919da01328537a4b0a1', '{"parentHash":"0x313b0fbb7cbb8bc1ba4fbc50684b516d31f9f7ee6f66d919da01328537a4b0a1","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x2927f53f1eaaeaa17a80f048f10474a7cc3b2c96547cc47caad33ff9e5b38da6","transactionsRoot":"0x80fd441b38b6ffb8f9369d8a5179356f9bf5ad332db0da99f7c6efdb90939cd2","receiptsRoot":"0xa262cee7ba62c004c6554e9cf378512a868346c24f8cafc1ac1954250339149e","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000900000000000000000000000000000000000000000000000000000000000000000000000001000000008000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000020000000000000000000","difficulty":"0x1","number":"0xa7721f","gasLimit":"0x1312d00","gasUsed":"0x9642","timestamp":"0x687f3700","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xf9039c9c24ab919066f2eb6f97360cfb727ed032c9e6142ea45e784b19894560"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x2927f53f1eaaeaa17a80f048f10474a7cc3b2c96547cc47caad33ff9e5b38da6', '1', '38466', '1753167616', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[{"type":2,"nonce":3392501,"txHash":"0xa5231ea1b94eb516575807531763b312d250ee5ad4dfbeea66beab5f448c32b6","gas":45919,"gasPrice":"0x1de8472","gasTipCap":"0x64","gasFeeCap":"0x1de8472","from":"0x0000000000000000000000000000000000000000","to":"0x5300000000000000000000000000000000000002","chainId":"0x8274f","value":"0x0","data":"0x39455d3a000000000000000000000000000000000000000000000000000000000004580f0000000000000000000000000000000000000000000000000000000000000001","isCreate":false,"accessList":[{"address":"0x5300000000000000000000000000000000000003","storageKeys":["0x297c59f20c6b2556a4ed35dccabbdeb8b1cf950f62aefb86b98d19b5a4aff2a2"]}],"authorizationList":null,"v":"0x1","r":"0xa09a97c38c7a58f40ff39ca74f938c63f1ef822cf91926d4fff96b7dc818d3f3","s":"0x77ee7453096794d9cbb206f26077f23b4cc88fe51893cb5eab46714e379ac833"}]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973728', '0xf27ff9223f6bf9a964737d50cb7c005f049cf0f4edfd16d24178a798c21716d6', '0xf9039c9c24ab919066f2eb6f97360cfb727ed032c9e6142ea45e784b19894560', '{"parentHash":"0xf9039c9c24ab919066f2eb6f97360cfb727ed032c9e6142ea45e784b19894560","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x0dbe54818526afaabbce83765eabcd4ec4d437a3497e5d046d599af862ea9850","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77220","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f3701","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xf27ff9223f6bf9a964737d50cb7c005f049cf0f4edfd16d24178a798c21716d6"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x0dbe54818526afaabbce83765eabcd4ec4d437a3497e5d046d599af862ea9850', '0', '0', '1753167617', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973729', '0x2b7777eb3ffe5939d6b70883cef69250ef5a2ed62a8b378973e0c3fe84707137', '0xf27ff9223f6bf9a964737d50cb7c005f049cf0f4edfd16d24178a798c21716d6', '{"parentHash":"0xf27ff9223f6bf9a964737d50cb7c005f049cf0f4edfd16d24178a798c21716d6","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb89ed319fb9dcaed2df7e72223683cf255f6c1e45742e6caa810938871ce53bf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77221","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f3702","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x2b7777eb3ffe5939d6b70883cef69250ef5a2ed62a8b378973e0c3fe84707137"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb89ed319fb9dcaed2df7e72223683cf255f6c1e45742e6caa810938871ce53bf', '0', '0', '1753167618', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973730', '0x56318f0a941611fc22640ea7f7d0308ab88a9e23059b5c6983bafc2402003d13', '0x2b7777eb3ffe5939d6b70883cef69250ef5a2ed62a8b378973e0c3fe84707137', '{"parentHash":"0x2b7777eb3ffe5939d6b70883cef69250ef5a2ed62a8b378973e0c3fe84707137","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xe603d341e958521d3f5df8f37b5144b3c003214c481716cffa4e8d6303d9734f","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77222","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f3703","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x56318f0a941611fc22640ea7f7d0308ab88a9e23059b5c6983bafc2402003d13"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xe603d341e958521d3f5df8f37b5144b3c003214c481716cffa4e8d6303d9734f', '0', '0', '1753167619', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');

-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DELETE FROM l2_block;
-- +goose StatementEnd
46
tests/prover-e2e/Makefile
Normal file
46
tests/prover-e2e/Makefile
Normal file
@@ -0,0 +1,46 @@
.PHONY: clean setup_db test_tool all check_vars

GOOSE_CMD?=goose


all: setup_db test_tool import_data

clean:
docker compose down

check_vars:
@if [ -z "$(BEGIN_BLOCK)" ] || [ -z "$(END_BLOCK)" ]; then \
echo "Error: BEGIN_BLOCK and END_BLOCK must be defined"; \
echo "Usage: make import_data BEGIN_BLOCK=<start_block> END_BLOCK=<end_block>"; \
exit 1; \
fi

setup_db: clean
docker compose up --detach
@echo "Waiting for PostgreSQL to be ready..."
@for i in $$(seq 1 30); do \
if nc -z localhost 5432 >/dev/null 2>&1; then \
echo "PostgreSQL port is open!"; \
sleep 2; \
break; \
fi; \
echo "Waiting for PostgreSQL to start... ($$i/30)"; \
sleep 2; \
if [ $$i -eq 30 ]; then \
echo "Timed out waiting for PostgreSQL to start"; \
exit 1; \
fi; \
done
${GOOSE_CMD} up
GOOSE_MIGRATION_DIR=./ ${GOOSE_CMD} up-to 100

test_tool:
go build -o $(PWD)/build/bin/e2e_tool ../../rollup/tests/integration_tool

build/bin/e2e_tool: test_tool

import_data_euclid: build/bin/e2e_tool check_vars
build/bin/e2e_tool --config ./config.json --codec 7 ${BEGIN_BLOCK} ${END_BLOCK}

import_data: build/bin/e2e_tool check_vars
build/bin/e2e_tool --config ./config.json --codec 8 ${BEGIN_BLOCK} ${END_BLOCK}
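For reference, a minimal usage sketch of this Makefile, assuming the same block range as the seeded migration above; adjust `BEGIN_BLOCK`/`END_BLOCK` to whatever data you imported (the `check_vars` guard rejects missing values):

```bash
# Bring up PostgreSQL, apply the goose migrations, then import block data.
cd tests/prover-e2e
make setup_db
make import_data BEGIN_BLOCK=10973700 END_BLOCK=10973730

# Equivalent one-shot invocation (setup_db + test_tool + import_data):
make all BEGIN_BLOCK=10973700 END_BLOCK=10973730

# Euclid-era encoding (codec 7) instead of the default codec 8:
make import_data_euclid BEGIN_BLOCK=10973700 END_BLOCK=10973730
```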
12
tests/prover-e2e/README.md
Normal file
12
tests/prover-e2e/README.md
Normal file
@@ -0,0 +1,12 @@
## A new e2e test tool to set up a local environment for testing coordinator and prover

It contains data from some blocks on Scroll Sepolia and helps generate a series of chunks/batches/bundles from these blocks, filling the coordinator's DB so that an e2e test (from chunk to bundle) can be run completely locally.

Steps:
1. Run `make all` under `tests/prover-e2e`. It launches a PostgreSQL DB in a local Docker container, ready to be used by the coordinator (including some chunks/batches/bundles waiting to be proven).
2. Download circuit assets with the `download-release.sh` script in `zkvm-prover`.
3. Generate the verifier assets corresponding to the downloaded circuit assets via `make gen_verifier_stuff` in `zkvm-prover`.
4. Set up `config.json` and `genesis.json` for the coordinator, and copy the verifier assets generated in step 3 into the directory from which the coordinator loads them.
5. Build and launch the `coordinator_api` service locally.
6. Set up the `config.json` for the zkvm prover so it connects to the locally launched coordinator API.
7. In `zkvm-prover`, run `make test_e2e_run`, which starts a prover locally, connects it to the local coordinator API service according to `config.json`, and proves all tasks injected into the DB in step 1.
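A rough command-line sketch of the steps above, assuming `zkvm-prover` sits at the repository root next to `tests/` and that `download-release.sh` is invoked with the fork name; coordinator flags are left as comments because they depend on the local setup:

```bash
# Step 1: seed the local DB (see the Makefile sketch above).
cd tests/prover-e2e && make all BEGIN_BLOCK=10973700 END_BLOCK=10973730

# Steps 2-3: fetch circuit assets and dump the matching verifier assets.
cd ../../zkvm-prover              # assumed layout: zkvm-prover at the repo root
./download-release.sh feynman     # invocation inferred from the script's "$1" handling
make gen_verifier_stuff

# Steps 4-5: edit the coordinator's config.json/genesis.json, copy the dumped
# verifier assets into the directory the coordinator loads them from, then
# build and start coordinator_api locally (flags depend on your setup).

# Steps 6-7: point the prover's config.json at the local coordinator API, then:
make test_e2e_run
```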
8
tests/prover-e2e/config.json
Normal file
8
tests/prover-e2e/config.json
Normal file
@@ -0,0 +1,8 @@
{
"db_config": {
"driver_name": "postgres",
"dsn": "postgres://dev:dev@localhost:5432/scroll?sslmode=disable",
"maxOpenNum": 5,
"maxIdleNum": 1
}
}
22
tests/prover-e2e/docker-compose.yml
Normal file
22
tests/prover-e2e/docker-compose.yml
Normal file
@@ -0,0 +1,22 @@
# docker-compose.yml
# This configuration is for local debugging only.
# - PostgreSQL is bound to localhost (127.0.0.1) and not exposed externally.
# - Data can optionally be persisted to ./db (relative to the current directory) by uncommenting the volumes section.
# - No production security settings are applied.
# The access URL is postgresql://dev:dev@localhost:5432/scroll

version: '3.8'

services:
postgres:
image: postgres
container_name: local_postgres
environment:
POSTGRES_USER: dev
POSTGRES_PASSWORD: dev
POSTGRES_DB: scroll
ports:
- "127.0.0.1:5432:5432" # Listen only on localhost
# volumes:
#   - ./db:/var/lib/postgresql/data # Persist data to local ./db
restart: unless-stopped
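A quick sanity check for this compose file, assuming Docker Compose v2 and a local `psql` client; the DSN matches `config.json` above:

```bash
# Start the container and confirm the credentials match config.json.
docker compose up --detach
psql "postgres://dev:dev@localhost:5432/scroll?sslmode=disable" -c '\conninfo'
```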
27
tests/prover-e2e/prepare/dump_block_records.sql
Normal file
27
tests/prover-e2e/prepare/dump_block_records.sql
Normal file
@@ -0,0 +1,27 @@
-- Create a file with INSERT statements for the specific records
\o block_export.sql
\t on
\a
SELECT 'INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES (' ||
quote_literal(number) || ', ' ||
quote_literal(hash) || ', ' ||
quote_literal(parent_hash) || ', ' ||
quote_literal(header) || ', ' ||
quote_literal(withdraw_root) || ', ' ||
quote_literal(state_root) || ', ' ||
quote_literal(tx_num) || ', ' ||
quote_literal(gas_used) || ', ' ||
quote_literal(block_timestamp) || ', ' ||
quote_literal(row_consumption) || ', ' ||
quote_literal(chunk_hash) || ', ' ||
quote_literal(transactions) ||
');'
FROM l2_block
WHERE number >= 10973700 and number <= 10973730
ORDER BY number ASC;
\t off
\a
\o
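This psql script is presumably run against a database that already contains the source block range; it writes `block_export.sql` via the `\o` redirection. A hedged invocation sketch (the DSN here is only a placeholder, not necessarily the dev database above):

```bash
# Regenerate block_export.sql from a DB that already holds blocks 10973700-10973730.
psql "postgres://dev:dev@localhost:5432/scroll?sslmode=disable" -f prepare/dump_block_records.sql
```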
@@ -1,10 +1,8 @@
.PHONY: prover lint tests_binary
.PHONY: prover prover_cpu lint tests_binary test_e2e_run test_run

RUST_MIN_STACK ?= 16777216
export RUST_MIN_STACK

CIRCUIT_STUFF = .work/euclid/chunk/app.vmexe .work/feynman/chunk/app.vmexe

ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
PLONKY3_VERSION=$(shell grep -m 1 "Plonky3.git" ../Cargo.lock | cut -d "#" -f2 | cut -c-7)
else
@@ -34,15 +32,20 @@ endif
ZK_VERSION=${ZKVM_COMMIT}-${PLONKY3_VERSION}

E2E_HANDLE_SET = ../tests/prover-e2e/testset.json
DUMP_DIR = .work
E2E_HANDLE_SET ?= ../tests/prover-e2e/testset.json
DUMP_DIR ?= .work

prover:
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZKVM_COMMIT=${ZKVM_COMMIT} $(MAKE) -C ../crates/gpu_override build
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --locked --release -Z unstable-options --lockfile-path ../crates/gpu_override/Cargo.lock -p prover

version:
echo ${GO_TAG}-${GIT_REV}-${ZK_VERSION}

prover_cpu:
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --locked --release -p prover

clean:
cargo clean -Z unstable-options --release -p prover --lockfile-path ../crates/gpu_override/Cargo.lock

tests_binary:
cargo clean && cargo test --release --no-run
@@ -53,16 +56,10 @@ lint:
cargo clippy --all-features --all-targets -- -D warnings
cargo fmt --all

$(CIRCUIT_STUFF):
@echo "Download stuff with download-release.sh, and put them into correct directory";
@exit 1;

test_run: $(CIRCUIT_STUFF)
test_run:
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --config ./config.json

test_e2e_run: $(CIRCUIT_STUFF) ${E2E_HANDLE_SET}
test_e2e_run: ${E2E_HANDLE_SET}
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --config ./config.json handle ${E2E_HANDLE_SET}

gen_verifier_stuff:
mkdir -p ${DUMP_DIR}
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --config ./config.json --forkname feynman dump ${DUMP_DIR}

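A usage sketch for the targets touched above; `DUMP_DIR` and `E2E_HANDLE_SET` are now `?=` defaults, so passing them explicitly is optional:

```bash
# CPU-only prover build (no gpu_override lockfile):
make prover_cpu

# Dump verifier assets for the feynman fork into .work/:
make gen_verifier_stuff DUMP_DIR=.work

# Prove the tasks injected by the e2e tool:
make test_e2e_run E2E_HANDLE_SET=../tests/prover-e2e/testset.json
```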
@@ -24,13 +24,10 @@
"db_path": ".work/db"
},
"circuits": {
"euclidV2": {
"hard_fork_name": "euclidV2",
"workspace_path": ".work/euclid"
},
"feynman": {
"hard_fork_name": "feynman",
"workspace_path": ".work/feynman1"
},
"base_url": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/feynman/",
"workspace_path": ".work/feynman"
}
}
}
@@ -3,7 +3,7 @@
# Define version mapping
declare -A VERSION_MAP
VERSION_MAP["euclid"]="0.4.3"
VERSION_MAP["feynman"]="0.5.0rc1"
VERSION_MAP["feynman"]="0.5.2"

# release version
if [ -z "${SCROLL_ZKVM_VERSION}" ]; then
@@ -14,7 +14,7 @@ if [ -z "${SCROLL_ZKVM_VERSION}" ]; then
echo "Setting SCROLL_ZKVM_VERSION to ${SCROLL_ZKVM_VERSION} based on '$1' argument"
else
# Default version if no argument or not recognized
SCROLL_ZKVM_VERSION=0.5.0rc0
SCROLL_ZKVM_VERSION=0.5.2
fi
fi
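With this bump, resolving the `feynman` entry yields 0.5.2. A hedged invocation sketch, assuming the script is run from its own directory in `zkvm-prover`:

```bash
# Resolve the version from VERSION_MAP via the fork-name argument:
./download-release.sh feynman

# Or pin the release explicitly through the environment:
SCROLL_ZKVM_VERSION=0.5.2 ./download-release.sh
```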