Compare commits


2 Commits

Author            SHA1        Message                                                                      Date
Sebastien Baizet  add7de93e6  ci: coordinator-api support arm64                                            2024-06-17 10:31:05 +02:00
Sebastien Baizet  9fe32095b0  Revert "remove error in github workflow" (This reverts commit 617bed64a8.)  2024-06-17 10:29:40 +02:00
27 changed files with 84 additions and 525 deletions

View File

@@ -1,41 +0,0 @@
name: Docker-coordinator-api-arm64

on:
  workflow_dispatch:
    inputs:
      tag:
        description: "tag of this image (suffix -arm64 is added automatically)"
        required: true
        type: string

jobs:
  build-and-push-arm64-image:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        arch:
          - aarch64
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Set up QEMU
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
          docker buildx create --name multiarch --driver docker-container --use
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build docker image
        uses: docker/build-push-action@v2
        with:
          platforms: linux/arm64
          context: .
          file: ./build/dockerfiles/coordinator-api.Dockerfile
          push: true
          tags: scrolltech/coordinator-api:${{inputs.tag}}-arm64

View File

@@ -39,6 +39,7 @@ jobs:
aws --region ${{ env.AWS_REGION }} ecr describe-repositories --repository-names ${{ env.REPOSITORY }} && : || aws --region ${{ env.AWS_REGION }} ecr create-repository --repository-name ${{ env.REPOSITORY }}
- name: Build and push
uses: docker/build-push-action@v3
platforms: linux/amd64,linux/arm64
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
REPOSITORY: event-watcher
@@ -361,6 +362,7 @@ jobs:
with:
context: .
file: ./build/dockerfiles/coordinator-api.Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}

View File

@@ -12,6 +12,7 @@ update:
cd $(PWD)/common/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG}&& go mod tidy
cd $(PWD)/coordinator/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/database/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/prover/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG}&& go mod tidy
cd $(PWD)/rollup/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/tests/integration-test/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
@@ -20,6 +21,7 @@ lint: ## The code's format and security checks.
make -C common lint
make -C coordinator lint
make -C database lint
make -C prover lint
make -C bridge-history-api lint
fmt: ## format the code
@@ -28,6 +30,7 @@ fmt: ## format the code
cd $(PWD)/common/ && go mod tidy
cd $(PWD)/coordinator/ && go mod tidy
cd $(PWD)/database/ && go mod tidy
cd $(PWD)/prover/ && go mod tidy
cd $(PWD)/rollup/ && go mod tidy
cd $(PWD)/tests/integration-test/ && go mod tidy
@@ -35,6 +38,7 @@ fmt: ## format the code
goimports -local $(PWD)/common/ -w .
goimports -local $(PWD)/coordinator/ -w .
goimports -local $(PWD)/database/ -w .
goimports -local $(PWD)/prover/ -w .
goimports -local $(PWD)/rollup/ -w .
goimports -local $(PWD)/tests/integration-test/ -w .

View File

@@ -147,7 +147,7 @@ func (b *EventUpdateLogic) updateL2WithdrawMessageInfos(ctx context.Context, bat
}
if withdrawTrie.NextMessageNonce != l2WithdrawMessages[0].MessageNonce {
log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actual next message nonce", l2WithdrawMessages[0].MessageNonce)
log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actuall next message nonce", l2WithdrawMessages[0].MessageNonce)
return fmt.Errorf("nonce mismatch")
}
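For context, the check above compares the next nonce expected by the locally rebuilt withdraw trie against the nonce of the first pending L2 withdraw message; the two adjacent log.Error lines are simply the old and new wording of the same error message. A minimal, self-contained sketch of that invariant, with simplified types standing in for the real orm and trie structures (names and fields here are assumptions, not the repository's definitions):

```go
package main

import (
	"fmt"
	"log"
)

// withdrawMessage and withdrawTrie are simplified stand-ins for the real
// types used in updateL2WithdrawMessageInfos; field names are assumptions.
type withdrawMessage struct {
	MessageNonce uint64
}

type withdrawTrie struct {
	NextMessageNonce uint64
}

// appendMessages accepts pending withdraw messages only when the first
// message's nonce matches what the trie expects next, mirroring the
// mismatch guard in the hunk above.
func (t *withdrawTrie) appendMessages(msgs []withdrawMessage) error {
	if len(msgs) == 0 {
		return nil
	}
	if t.NextMessageNonce != msgs[0].MessageNonce {
		log.Printf("nonce mismatch: expected next message nonce %d, actual next message nonce %d",
			t.NextMessageNonce, msgs[0].MessageNonce)
		return fmt.Errorf("nonce mismatch")
	}
	t.NextMessageNonce += uint64(len(msgs))
	return nil
}

func main() {
	trie := &withdrawTrie{NextMessageNonce: 5}
	err := trie.appendMessages([]withdrawMessage{{MessageNonce: 5}, {MessageNonce: 6}})
	fmt.Println(err, trie.NextMessageNonce) // <nil> 7
}
```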

View File

@@ -31,7 +31,7 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
@@ -537,7 +537,7 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"eth-types",
"ethers-core",
@@ -1126,7 +1126,7 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"base64 0.13.1",
"ethers-core",
@@ -1283,7 +1283,7 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"eth-types",
"geth-utils",
@@ -1465,7 +1465,7 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"eth-types",
"halo2_proofs",
@@ -1488,7 +1488,7 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"env_logger 0.10.0",
"gobuild",
@@ -2237,7 +2237,7 @@ dependencies = [
[[package]]
name = "mock"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"eth-types",
"ethers-core",
@@ -2252,7 +2252,7 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"eth-types",
"halo2curves",
@@ -2724,7 +2724,7 @@ dependencies = [
[[package]]
name = "prover"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"aggregator",
"anyhow",
@@ -4361,7 +4361,7 @@ dependencies = [
[[package]]
name = "zkevm-circuits"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"array-init",
"bus-mapping",

View File

@@ -25,7 +25,7 @@ bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/i
[dependencies]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", default-features = false, features = ["parallel_syn", "scroll"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.1", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.0"
env_logger = "0.9.0"

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v4.4.22"
var tag = "v4.4.18"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {

View File

@@ -38,7 +38,7 @@ Mapping from L2 ERC20 token address to corresponding L2ERC20Gateway.
function defaultERC20Gateway() external view returns (address)
```
The address of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.
The addess of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.

View File

@@ -945,7 +945,7 @@ error ErrorRevertFinalizedBatch()
*Thrown when reverting a finalized batch.*
*Thrown when reverting a finialized batch.*
### ErrorRevertNotStartFromEnd
@@ -956,7 +956,7 @@ error ErrorRevertNotStartFromEnd()
*Thrown when the reverted batches are not in the ending of committed batch chain.*
*Thrown when the reverted batches are not in the ending of commited batch chain.*
### ErrorRevertZeroBatches

View File

@@ -44,7 +44,7 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
struct ReplayState {
// The number of replayed times.
uint128 times;
// The queue index of latest replayed one. If it is zero, it means the message has not been replayed.
// The queue index of lastest replayed one. If it is zero, it means the message has not been replayed.
uint128 lastIndex;
}

View File

@@ -167,7 +167,7 @@ abstract contract L1ERC20Gateway is IL1ERC20Gateway, IMessageDropCallback, Scrol
/// @dev Internal function to do all the deposit operations.
///
/// @param _token The token to deposit.
/// @param _to The recipient address to receive the token in L2.
/// @param _to The recipient address to recieve the token in L2.
/// @param _amount The amount of token to deposit.
/// @param _data Optional data to forward to recipient's account.
/// @param _gasLimit Gas limit required to complete the deposit on L2.

View File

@@ -25,7 +25,7 @@ contract L1GatewayRouter is OwnableUpgradeable, IL1GatewayRouter {
/// @notice The address of L1ETHGateway.
address public ethGateway;
/// @notice The address of default ERC20 gateway, normally the L1StandardERC20Gateway contract.
/// @notice The addess of default ERC20 gateway, normally the L1StandardERC20Gateway contract.
address public defaultERC20Gateway;
/// @notice Mapping from ERC20 token address to corresponding L1ERC20Gateway.

View File

@@ -97,7 +97,7 @@ contract L1StandardERC20Gateway is L1ERC20Gateway {
/// @inheritdoc IL1ERC20Gateway
function getL2ERC20Address(address _l1Token) public view override returns (address) {
// In StandardERC20Gateway, all corresponding l2 tokens are deployed by Create2 with salt,
// In StandardERC20Gateway, all corresponding l2 tokens are depoyed by Create2 with salt,
// we can calculate the l2 address directly.
bytes32 _salt = keccak256(abi.encodePacked(counterpart, keccak256(abi.encodePacked(_l1Token))));

View File

@@ -89,10 +89,10 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
/// @dev Thrown when the number of batches to revert is zero.
error ErrorRevertZeroBatches();
/// @dev Thrown when the reverted batches are not in the ending of committed batch chain.
/// @dev Thrown when the reverted batches are not in the ending of commited batch chain.
error ErrorRevertNotStartFromEnd();
/// @dev Thrown when reverting a finalized batch.
/// @dev Thrown when reverting a finialized batch.
error ErrorRevertFinalizedBatch();
/// @dev Thrown when the given state root is zero.
@@ -580,7 +580,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
/// @param _chunks The list of chunks to commit.
/// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not.
/// @return _batchDataHash The computed data hash for the list of chunks.
/// @return _totalL1MessagesPoppedInBatch The total number of L1 messages popped in this batch, including skipped one.
/// @return _totalL1MessagesPoppedInBatch The total number of L1 messages poped in this batch, including skipped one.
function _commitChunksV0(
uint256 _totalL1MessagesPoppedOverall,
bytes[] memory _chunks,
@@ -627,7 +627,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
/// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not.
/// @return _blobVersionedHash The blob versioned hash for the blob carried in this transaction.
/// @return _batchDataHash The computed data hash for the list of chunks.
/// @return _totalL1MessagesPoppedInBatch The total number of L1 messages popped in this batch, including skipped one.
/// @return _totalL1MessagesPoppedInBatch The total number of L1 messages poped in this batch, including skipped one.
function _commitChunksV1(
uint256 _totalL1MessagesPoppedOverall,
bytes[] memory _chunks,
@@ -950,7 +950,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
/// @dev Internal function to pop finalized l1 messages.
/// @param bitmapPtr The memory offset of `skippedL1MessageBitmap`.
/// @param totalL1MessagePopped The total number of L1 messages popped in all batches including current batch.
/// @param totalL1MessagePopped The total number of L1 messages poped in all batches including current batch.
/// @param l1MessagePopped The number of L1 messages popped in current batch.
function _popL1Messages(
uint256 bitmapPtr,

View File

@@ -21,7 +21,7 @@ contract L2GatewayRouter is OwnableUpgradeable, IL2GatewayRouter {
/// @notice The address of L2ETHGateway.
address public ethGateway;
/// @notice The address of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.
/// @notice The addess of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.
address public defaultERC20Gateway;
/// @notice Mapping from L2 ERC20 token address to corresponding L2ERC20Gateway.

View File

@@ -242,7 +242,7 @@ contract L1BlockContainer is OwnableBase, IL1BlockContainer {
let _computedBlockHash := keccak256(memPtr, headerPayloadLength)
require(eq(_blockHash, _computedBlockHash), "Block hash mismatch")
// load 16 values
// load 16 vaules
for {
let i := 0
} lt(i, 16) {

View File

@@ -23,7 +23,7 @@ contract ScrollStandardERC20Factory is Ownable, IScrollStandardERC20Factory {
/// @inheritdoc IScrollStandardERC20Factory
function computeL2TokenAddress(address _gateway, address _l1Token) external view returns (address) {
// In StandardERC20Gateway, all corresponding l2 tokens are deployed by Create2 with salt,
// In StandardERC20Gateway, all corresponding l2 tokens are depoyed by Create2 with salt,
// we can calculate the l2 address directly.
bytes32 _salt = _getSalt(_gateway, _l1Token);

View File

@@ -369,7 +369,7 @@ library PatriciaMerkleTrieVerifier {
// first item is considered the root node.
// Otherwise verifies that the hash of the current node
// is the same as the previous chosen one.
// is the same as the previous choosen one.
switch i
case 1 {
rootHash := hash
@@ -425,7 +425,7 @@ library PatriciaMerkleTrieVerifier {
}
}
// lastly, derive the path of the chosen one (TM)
// lastly, derive the path of the choosen one (TM)
path := derivePath(key, depth)
}

View File

@@ -113,7 +113,7 @@ library ZkTrieVerifier {
// first item is considered the root node.
// Otherwise verifies that the hash of the current node
// is the same as the previous chosen one.
// is the same as the previous choosen one.
switch depth
case 1 {
rootHash := hash
@@ -262,7 +262,7 @@ library ZkTrieVerifier {
ptr, storageValue := verifyStorageProof(poseidon, storageKey, storageRootHash, ptr)
// the one and only boundary check
// in case an attacker crafted a malicious payload
// in case an attacker crafted a malicous payload
// and succeeds in the prior verification steps
// then this should catch any bogus accesses
if iszero(eq(ptr, add(proof.offset, proof.length))) {

View File

@@ -242,28 +242,21 @@ func (bp *BatchProverTask) assignWithTwoCircuits(ctx *gin.Context, taskCtx *prov
chunkRanges [2]*chunkIndexRange
err error
)
var chunkRange *chunkIndexRange
for i := 0; i < 2; i++ {
hardForkNames[i] = bp.reverseVkMap[getTaskParameter.VKs[i]]
chunkRanges[i], err = bp.getChunkRangeByName(ctx, hardForkNames[i])
if err != nil {
return nil, err
}
if chunkRanges[i] != nil {
if chunkRange == nil {
chunkRange = chunkRanges[i]
} else {
chunkRange = chunkRange.merge(*chunkRanges[i])
}
if chunkRanges[i] == nil {
return nil, nil
}
}
if chunkRange == nil {
return nil, nil
}
chunkRange := chunkRanges[0].merge(*chunkRanges[1])
var hardForkName string
getHardForkName := func(batch *orm.Batch) (string, error) {
for i := 0; i < 2; i++ {
if chunkRanges[i] != nil && chunkRanges[i].contains(batch.StartChunkIndex, batch.EndChunkIndex) {
if chunkRanges[i].contains(batch.StartChunkIndex, batch.EndChunkIndex) {
hardForkName = hardForkNames[i]
break
}
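The hunk above changes how the per-fork chunk ranges are combined before a batch task is assigned: one variant tolerates a missing range and merges whatever is present, while the other returns early unless both ranges were found and then merges them in a single step. A minimal sketch of what merge and contains could look like for such a range type (the struct and field names are illustrative assumptions, not the repository's definition):

```go
package main

import "fmt"

// chunkIndexRange is an inclusive range of chunk indexes. The struct and
// field names are assumptions made for illustration.
type chunkIndexRange struct {
	start uint64
	end   uint64
}

// merge returns the smallest range covering both r and other.
func (r chunkIndexRange) merge(other chunkIndexRange) *chunkIndexRange {
	merged := r
	if other.start < merged.start {
		merged.start = other.start
	}
	if other.end > merged.end {
		merged.end = other.end
	}
	return &merged
}

// contains reports whether a batch spanning [startChunkIndex, endChunkIndex]
// lies entirely inside this range.
func (r chunkIndexRange) contains(startChunkIndex, endChunkIndex uint64) bool {
	return startChunkIndex >= r.start && endChunkIndex <= r.end
}

func main() {
	forkA := &chunkIndexRange{start: 0, end: 9}   // hypothetical range for the first VK's fork
	forkB := &chunkIndexRange{start: 10, end: 19} // hypothetical range for the second VK's fork
	combined := forkA.merge(*forkB)
	fmt.Println(combined.contains(3, 12)) // true: the merged range covers chunks 0..19
	fmt.Println(forkB.contains(3, 12))    // false: the batch starts before this fork's range
}
```

With semantics like these, requiring both ranges before merging keeps the later contains lookup unambiguous when mapping a batch's start and end chunk indexes back to a hard fork name.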

prover/Cargo.lock (generated, 33 changed lines)
View File

@@ -59,12 +59,12 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor 0.1.26",
"ctor",
"encoder",
"env_logger 0.10.2",
"eth-types 0.11.0",
@@ -661,7 +661,7 @@ dependencies = [
[[package]]
name = "bus-mapping"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1036,16 +1036,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "ctor"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f"
dependencies = [
"quote",
"syn 2.0.66",
]
[[package]]
name = "ctr"
version = "0.9.2"
@@ -1391,7 +1381,7 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"base64 0.13.1",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1625,7 +1615,7 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"eth-types 0.11.0",
"geth-utils 0.11.0",
@@ -1863,7 +1853,7 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"eth-types 0.11.0",
"halo2_proofs",
@@ -1896,7 +1886,7 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"env_logger 0.10.2",
"gobuild",
@@ -2795,7 +2785,7 @@ dependencies = [
[[package]]
name = "mock"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -2825,7 +2815,7 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"eth-types 0.11.0",
"halo2curves",
@@ -3388,7 +3378,6 @@ dependencies = [
"anyhow",
"base64 0.13.1",
"clap",
"ctor 0.2.8",
"env_logger 0.11.3",
"eth-keystore",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -3451,7 +3440,7 @@ dependencies = [
[[package]]
name = "prover"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"aggregator 0.11.0",
"anyhow",
@@ -5595,7 +5584,7 @@ dependencies = [
[[package]]
name = "zkevm-circuits"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
dependencies = [
"array-init",
"bus-mapping 0.11.0",

View File

@@ -30,7 +30,7 @@ ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branc
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "v0.10", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
prover_next = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_next = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3"
@@ -45,4 +45,3 @@ tokio = "1.37.0"
sled = "0.34.7"
http = "1.1.0"
clap = { version = "4.5", features = ["derive"] }
ctor = "0.2.8"

View File

@@ -1,4 +1,4 @@
.PHONY: prover lint tests_binary
.PHONY: prover
ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ./Cargo.lock | cut -d "#" -f2 | cut -c-7)
@@ -38,15 +38,4 @@ endif
prover:
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --release
rm -rf ./lib && mkdir ./lib
find target/ -name "libzktrie.so" | xargs -I{} cp {} ./lib
tests_binary:
cargo clean && cargo test --release --no-run
ls target/release/deps/prover* | grep -v "\.d" | xargs -I{} ln -sf {} ./prover.test
rm -rf ./lib && mkdir ./lib
find target/ -name "libzktrie.so" | xargs -I{} cp {} ./lib
lint:
cargo check --all-features
cargo clippy --all-features --all-targets -- -D warnings
cargo fmt --all
find target/ -name "libzktrie.so" | xargs -I{} cp {} ./lib

View File

@@ -1,5 +1,3 @@
#![feature(lazy_cell)]
mod config;
mod coordinator_client;
mod geth_client;

View File

@@ -6,8 +6,8 @@ use serde::Deserialize;
use crate::types::{CommonHash, Task};
use prover::{
aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver, BatchProof, BlockTrace,
ChunkHash, ChunkProof,
aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver, BlockTrace, ChunkHash,
ChunkProof,
};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
@@ -60,7 +60,8 @@ impl BaseCircuitsHandler {
}
}
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_trace = self.gen_chunk_traces(task)?;
if let Some(prover) = self.chunk_prover.as_ref() {
let chunk_proof = prover.borrow_mut().gen_chunk_proof(
chunk_trace,
@@ -69,50 +70,34 @@ impl BaseCircuitsHandler {
self.get_output_dir(),
)?;
return Ok(chunk_proof);
return serde_json::to_string(&chunk_proof).map_err(|e| anyhow::anyhow!(e));
}
unreachable!("please check errors in proof_type logic")
}
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_trace = self.gen_chunk_traces(task)?;
let chunk_proof = self.gen_chunk_proof_raw(chunk_trace)?;
Ok(serde_json::to_string(&chunk_proof)?)
}
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
fn gen_batch_proof_raw(
&self,
chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)>,
) -> Result<BatchProof> {
if let Some(prover) = self.batch_prover.as_ref() {
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
let is_valid = prover.borrow_mut().check_chunk_proofs(&chunk_proofs);
if !is_valid {
bail!("non-match chunk protocol")
bail!("non-match chunk protocol, task-id: {}", &task.id)
}
let batch_proof = prover.borrow_mut().gen_agg_evm_proof(
chunk_hashes_proofs,
None,
self.get_output_dir(),
)?;
return Ok(batch_proof);
return serde_json::to_string(&batch_proof).map_err(|e| anyhow::anyhow!(e));
}
unreachable!("please check errors in proof_type logic")
}
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_batch_proof for task {}", task.id);
let chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let batch_proof = self.gen_batch_proof_raw(chunk_hashes_proofs)?;
Ok(serde_json::to_string(&batch_proof)?)
}
fn get_output_dir(&self) -> Option<&str> {
OUTPUT_DIR.as_deref()
}
@@ -210,178 +195,3 @@ impl CircuitsHandler for BaseCircuitsHandler {
}
}
}
// =================================== tests module ========================================
#[cfg(test)]
mod tests {
use super::*;
use crate::zk_circuits_handler::utils::encode_vk;
use prover::utils::chunk_trace_to_witness_block;
use std::{path::PathBuf, sync::LazyLock};
#[ctor::ctor]
fn init() {
crate::utils::log_init(None);
log::info!("logger initialized");
}
static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("BERNOULLI_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/')
.to_string()
});
static PARAMS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_params", *WORK_DIR));
static ASSETS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_assets", *WORK_DIR));
static PROOF_DUMP_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/proof_data", *WORK_DIR));
static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
#[test]
fn it_works() {
let result = true;
assert!(result);
}
#[test]
fn test_circuits() -> Result<()> {
let chunk_handler =
BaseCircuitsHandler::new(ProofType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let chunk_vk = chunk_handler.get_vk(ProofType::Chunk).unwrap();
check_vk(ProofType::Chunk, chunk_vk, "chunk vk must be available");
let chunk_dir_paths = get_chunk_dir_paths()?;
log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
let mut chunk_infos = vec![];
let mut chunk_proofs = vec![];
for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
let chunk_id = format!("chunk_proof{}", id + 1);
log::info!("start to process {chunk_id}");
let chunk_trace = read_chunk_trace(chunk_path)?;
let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
chunk_infos.push(chunk_info);
log::info!("start to prove {chunk_id}");
let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace)?;
let proof_data = serde_json::to_string(&chunk_proof)?;
dump_proof(chunk_id, proof_data)?;
chunk_proofs.push(chunk_proof);
}
let batch_handler =
BaseCircuitsHandler::new(ProofType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_vk = batch_handler.get_vk(ProofType::Batch).unwrap();
check_vk(ProofType::Batch, batch_vk, "batch vk must be available");
let chunk_hashes_proofs = chunk_infos.into_iter().zip(chunk_proofs).collect();
log::info!("start to prove batch");
let batch_proof = batch_handler.gen_batch_proof_raw(chunk_hashes_proofs)?;
let proof_data = serde_json::to_string(&batch_proof)?;
dump_proof("batch_proof".to_string(), proof_data)?;
Ok(())
}
fn check_vk(proof_type: ProofType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", proof_type);
let vk_from_file = read_vk(proof_type).unwrap();
assert_eq!(vk_from_file, encode_vk(vk), "{info}")
}
fn read_vk(proof_type: ProofType) -> Result<String> {
log::info!("read_vk, {:?}", proof_type);
let vk_file = match proof_type {
ProofType::Chunk => CHUNK_VK_PATH.clone(),
ProofType::Batch => BATCH_VK_PATH.clone(),
ProofType::Undefined => unreachable!(),
};
let data = std::fs::read(vk_file)?;
Ok(encode_vk(data))
}
fn read_chunk_trace(path: PathBuf) -> Result<Vec<BlockTrace>> {
log::info!("read_chunk_trace, {:?}", path);
let mut chunk_trace: Vec<BlockTrace> = vec![];
fn read_block_trace(file: &PathBuf) -> Result<BlockTrace> {
let f = std::fs::File::open(file)?;
Ok(serde_json::from_reader(&f)?)
}
if path.is_dir() {
let entries = std::fs::read_dir(&path)?;
let mut files: Vec<String> = entries
.into_iter()
.filter_map(|e| {
if e.is_err() {
return None;
}
let entry = e.unwrap();
if entry.path().is_dir() {
return None;
}
if let Result::Ok(file_name) = entry.file_name().into_string() {
Some(file_name)
} else {
None
}
})
.collect();
files.sort();
log::info!("files in chunk {:?} is {:?}", path, files);
for file in files {
let block_trace = read_block_trace(&path.join(file))?;
chunk_trace.push(block_trace);
}
} else {
let block_trace = read_block_trace(&path)?;
chunk_trace.push(block_trace);
}
Ok(chunk_trace)
}
fn get_chunk_dir_paths() -> Result<Vec<PathBuf>> {
let batch_path = PathBuf::from(BATCH_DIR_PATH.clone());
let entries = std::fs::read_dir(&batch_path)?;
let mut files: Vec<String> = entries
.filter_map(|e| {
if e.is_err() {
return None;
}
let entry = e.unwrap();
if entry.path().is_dir() {
if let Result::Ok(file_name) = entry.file_name().into_string() {
Some(file_name)
} else {
None
}
} else {
None
}
})
.collect();
files.sort();
log::info!("files in batch {:?} is {:?}", batch_path, files);
Ok(files.into_iter().map(|f| batch_path.join(f)).collect())
}
fn traces_to_chunk_info(chunk_trace: Vec<BlockTrace>) -> Result<ChunkHash> {
let witness_block = chunk_trace_to_witness_block(chunk_trace)?;
Ok(ChunkHash::from_witness_block(&witness_block, false))
}
fn dump_proof(id: String, proof_data: String) -> Result<()> {
let dump_path = PathBuf::from(PROOF_DUMP_PATH.clone());
Ok(std::fs::write(dump_path.join(id), proof_data)?)
}
}

View File

@@ -8,7 +8,7 @@ use std::{cell::RefCell, cmp::Ordering, rc::Rc};
use prover_next::{
aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver,
BatchProof, BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask,
BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask,
};
use super::bernoulli::OUTPUT_DIR;
@@ -59,7 +59,8 @@ impl NextCircuitsHandler {
}
}
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_trace = self.gen_chunk_traces(task)?;
if let Some(prover) = self.chunk_prover.as_ref() {
let chunk = ChunkProvingTask::from(chunk_trace);
@@ -68,29 +69,22 @@ impl NextCircuitsHandler {
.borrow_mut()
.gen_chunk_proof(chunk, None, None, self.get_output_dir())?;
return Ok(chunk_proof);
return serde_json::to_string(&chunk_proof).map_err(|e| anyhow::anyhow!(e));
}
unreachable!("please check errors in proof_type logic")
}
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_trace = self.gen_chunk_traces(task)?;
let chunk_proof = self.gen_chunk_proof_raw(chunk_trace)?;
Ok(serde_json::to_string(&chunk_proof)?)
}
fn gen_batch_proof_raw(
&self,
chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)>,
) -> Result<BatchProof> {
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
if let Some(prover) = self.batch_prover.as_ref() {
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
let is_valid = prover.borrow_mut().check_protocol_of_chunks(&chunk_proofs);
if !is_valid {
bail!("non-match chunk protocol")
bail!("non-match chunk protocol, task-id: {}", &task.id)
}
check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
let batch = BatchProvingTask { chunk_proofs };
@@ -99,19 +93,11 @@ impl NextCircuitsHandler {
.borrow_mut()
.gen_agg_evm_proof(batch, None, self.get_output_dir())?;
return Ok(batch_proof);
return serde_json::to_string(&batch_proof).map_err(|e| anyhow::anyhow!(e));
}
unreachable!("please check errors in proof_type logic")
}
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_batch_proof for task {}", task.id);
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let batch_proof = self.gen_batch_proof_raw(chunk_hashes_proofs)?;
Ok(serde_json::to_string(&batch_proof)?)
}
fn get_output_dir(&self) -> Option<&str> {
OUTPUT_DIR.as_deref()
}
@@ -209,178 +195,3 @@ impl CircuitsHandler for NextCircuitsHandler {
}
}
}
// =================================== tests module ========================================
#[cfg(test)]
mod tests {
use super::*;
use crate::zk_circuits_handler::utils::encode_vk;
use prover_next::utils::chunk_trace_to_witness_block;
use std::{path::PathBuf, sync::LazyLock};
#[ctor::ctor]
fn init() {
crate::utils::log_init(None);
log::info!("logger initialized");
}
static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("CURIE_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/')
.to_string()
});
static PARAMS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_params", *WORK_DIR));
static ASSETS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_assets", *WORK_DIR));
static PROOF_DUMP_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/proof_data", *WORK_DIR));
static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
#[test]
fn it_works() {
let result = true;
assert!(result);
}
#[test]
fn test_circuits() -> Result<()> {
let chunk_handler =
NextCircuitsHandler::new(ProofType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let chunk_vk = chunk_handler.get_vk(ProofType::Chunk).unwrap();
check_vk(ProofType::Chunk, chunk_vk, "chunk vk must be available");
let chunk_dir_paths = get_chunk_dir_paths()?;
log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
let mut chunk_infos = vec![];
let mut chunk_proofs = vec![];
for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
let chunk_id = format!("chunk_proof{}", id + 1);
log::info!("start to process {chunk_id}");
let chunk_trace = read_chunk_trace(chunk_path)?;
let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
chunk_infos.push(chunk_info);
log::info!("start to prove {chunk_id}");
let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace)?;
let proof_data = serde_json::to_string(&chunk_proof)?;
dump_proof(chunk_id, proof_data)?;
chunk_proofs.push(chunk_proof);
}
let batch_handler =
NextCircuitsHandler::new(ProofType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_vk = batch_handler.get_vk(ProofType::Batch).unwrap();
check_vk(ProofType::Batch, batch_vk, "batch vk must be available");
let chunk_hashes_proofs = chunk_infos.into_iter().zip(chunk_proofs).collect();
log::info!("start to prove batch");
let batch_proof = batch_handler.gen_batch_proof_raw(chunk_hashes_proofs)?;
let proof_data = serde_json::to_string(&batch_proof)?;
dump_proof("batch_proof".to_string(), proof_data)?;
Ok(())
}
fn check_vk(proof_type: ProofType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", proof_type);
let vk_from_file = read_vk(proof_type).unwrap();
assert_eq!(vk_from_file, encode_vk(vk), "{info}")
}
fn read_vk(proof_type: ProofType) -> Result<String> {
log::info!("read_vk, {:?}", proof_type);
let vk_file = match proof_type {
ProofType::Chunk => CHUNK_VK_PATH.clone(),
ProofType::Batch => BATCH_VK_PATH.clone(),
ProofType::Undefined => unreachable!(),
};
let data = std::fs::read(vk_file)?;
Ok(encode_vk(data))
}
fn read_chunk_trace(path: PathBuf) -> Result<Vec<BlockTrace>> {
log::info!("read_chunk_trace, {:?}", path);
let mut chunk_trace: Vec<BlockTrace> = vec![];
fn read_block_trace(file: &PathBuf) -> Result<BlockTrace> {
let f = std::fs::File::open(file)?;
Ok(serde_json::from_reader(&f)?)
}
if path.is_dir() {
let entries = std::fs::read_dir(&path)?;
let mut files: Vec<String> = entries
.into_iter()
.filter_map(|e| {
if e.is_err() {
return None;
}
let entry = e.unwrap();
if entry.path().is_dir() {
return None;
}
if let Result::Ok(file_name) = entry.file_name().into_string() {
Some(file_name)
} else {
None
}
})
.collect();
files.sort();
log::info!("files in chunk {:?} is {:?}", path, files);
for file in files {
let block_trace = read_block_trace(&path.join(file))?;
chunk_trace.push(block_trace);
}
} else {
let block_trace = read_block_trace(&path)?;
chunk_trace.push(block_trace);
}
Ok(chunk_trace)
}
fn get_chunk_dir_paths() -> Result<Vec<PathBuf>> {
let batch_path = PathBuf::from(BATCH_DIR_PATH.clone());
let entries = std::fs::read_dir(&batch_path)?;
let mut files: Vec<String> = entries
.filter_map(|e| {
if e.is_err() {
return None;
}
let entry = e.unwrap();
if entry.path().is_dir() {
if let Result::Ok(file_name) = entry.file_name().into_string() {
Some(file_name)
} else {
None
}
} else {
None
}
})
.collect();
files.sort();
log::info!("files in batch {:?} is {:?}", batch_path, files);
Ok(files.into_iter().map(|f| batch_path.join(f)).collect())
}
fn traces_to_chunk_info(chunk_trace: Vec<BlockTrace>) -> Result<ChunkInfo> {
let witness_block = chunk_trace_to_witness_block(chunk_trace)?;
Ok(ChunkInfo::from_witness_block(&witness_block, false))
}
fn dump_proof(id: String, proof_data: String) -> Result<()> {
let dump_path = PathBuf::from(PROOF_DUMP_PATH.clone());
Ok(std::fs::write(dump_path.join(id), proof_data)?)
}
}

View File

@@ -34,7 +34,12 @@ import (
rutils "scroll-tech/rollup/internal/utils"
)
// Layer2Relayer is responsible for committing and finalizing L2 blocks on L1.
// Layer2Relayer is responsible for
// 1. Committing and finalizing L2 blocks on L1
// 2. Relaying messages from L2 to L1
//
// Actions are triggered by new head from layer 1 geth node.
// @todo It's better to be triggered by watcher.
type Layer2Relayer struct {
ctx context.Context
@@ -597,7 +602,7 @@ func (r *Layer2Relayer) finalizeBatch(dbBatch *orm.Batch, withProof bool) error
log.Info("finalizeBatch in layer1", "with proof", withProof, "index", dbBatch.Index, "batch hash", dbBatch.Hash, "tx hash", txHash.String())
// Updating rollup status in database.
// record and sync with db, @todo handle db error
if err := r.batchOrm.UpdateFinalizeTxHashAndRollupStatus(r.ctx, dbBatch.Hash, txHash.String(), types.RollupFinalizing); err != nil {
log.Error("UpdateFinalizeTxHashAndRollupStatus failed", "index", dbBatch.Index, "batch hash", dbBatch.Hash, "tx hash", txHash.String(), "err", err)
return err