Compare commits


50 Commits

Author SHA1 Message Date
Péter Garamvölgyi
f69fc7b5d5 enable isCurie on deployment 2024-07-09 14:21:14 +02:00
Péter Garamvölgyi
6e905352fc bump foundry version 2024-07-05 17:08:24 +02:00
Péter Garamvölgyi
0680c0d4d3 set Curie in genesis 2024-07-05 16:53:09 +02:00
Péter Garamvölgyi
29a3447d50 Merge branch 'develop' into feat-deterministic-deployment 2024-07-05 16:44:21 +02:00
Mengran Lan
16673e2b97 feat(bridge-history): change FinalizeBatch event logic (#1405)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: amoylan2 <amoylan2@users.noreply.github.com>
2024-07-02 00:40:04 +08:00
colin
a2536d5613 fix(coordinator): panic (#1407)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-06-29 06:01:44 +08:00
colin
5f31d28ced fix(coordinator): get empty task (#1406)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-06-28 23:17:35 +08:00
colin
eada1d05fe fix(Makefile): remove outdated prover in golang-related cmds (#1398) 2024-06-26 14:36:10 +08:00
sbaizet
2f39e37bc2 Ci coordinator api arm64 (#1383) 2024-06-24 09:17:22 +02:00
Zhang Zhuo
d454941c81 chore(libzkp): v0.11.4 (#1391)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-06-24 09:35:57 +08:00
Péter Garamvölgyi
977f5701fe fix REACT_APP_EXTERNAL_RPC_URI_L1 2024-06-18 20:13:07 +02:00
Mengran Lan
738c85759d feat(prover): add unit test for circuits e2e test (used in test_zkp project) (#1379) 2024-06-18 15:21:26 +08:00
Zhang Zhuo
27d627e318 feat(coordinator & prover): upgrade libzkp to v0.11.3 (#1384)
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-06-18 10:39:32 +08:00
Kenn
8c3ecd395f chore: spelling fixes (#1378) 2024-06-18 09:31:20 +08:00
Hsiao_Jan
33016b1d5d fix(db): the function UpdateProvingStatusFailed proving_status determines the condition incorrectly (#1377)
Co-authored-by: xiaoranlu <xiaoranlu@tencent.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2024-06-14 10:01:33 +08:00
Péter Garamvölgyi
53d0389ba7 update base image to arm-friendly one 2024-06-13 17:26:56 +02:00
Péter Garamvölgyi
15969d34ce update frontend URLs 2024-06-10 16:32:53 +02:00
Péter Garamvölgyi
fc0cb979f0 set mock finalization params 2024-06-04 20:33:01 +02:00
Péter Garamvölgyi
2b5b82d466 init frontend config 2024-05-31 09:38:56 +02:00
Péter Garamvölgyi
e2b87879da add chain-monitor and balance-checker config gen 2024-05-22 15:35:37 +02:00
Péter Garamvölgyi
4ab710359d update redis config values 2024-05-22 14:52:34 +02:00
Péter Garamvölgyi
f27d27cb8a add coordinator-config.json 2024-05-21 16:55:15 +02:00
Péter Garamvölgyi
0e28f28a6f Merge branch 'develop' into feat-deterministic-deployment 2024-05-21 16:38:43 +02:00
Péter Garamvölgyi
cbd3f54b6b update scripts 2024-05-13 16:29:34 +01:00
Péter Garamvölgyi
bc1cb1f86e add missing files to container 2024-05-13 16:27:40 +01:00
Péter Garamvölgyi
5497777c88 update dockerfile 2024-05-13 16:18:15 +01:00
Péter Garamvölgyi
39db5634e2 wip: generate other config files 2024-05-07 16:30:52 +08:00
Péter Garamvölgyi
2d14457c91 make envs more configurable 2024-05-03 15:23:58 +08:00
Péter Garamvölgyi
5b3a65b208 use legacy txs on L2 2024-04-29 23:10:21 +08:00
Péter Garamvölgyi
53b14bc090 fix genesis extradata 2024-04-29 20:09:15 +08:00
Péter Garamvölgyi
180f3c63fb ensure l1ChainId is serialized as string 2024-04-29 19:06:30 +08:00
Péter Garamvölgyi
5e59373cf4 add simple build script 2024-04-28 13:11:20 +08:00
Péter Garamvölgyi
1d856821df small improvements 2024-04-28 13:01:44 +08:00
Péter Garamvölgyi
98d2c333bd move files 2024-04-28 12:27:16 +08:00
Péter Garamvölgyi
d4bbb252de enable cache 2024-04-28 10:37:36 +08:00
Péter Garamvölgyi
7ac34b3196 fmt 2024-04-27 20:23:56 +08:00
Péter Garamvölgyi
dfab7315df predeploy DeterministicDeploymentProxy 2024-04-27 20:19:22 +08:00
Péter Garamvölgyi
623cf34fa3 improve dockerfiles, better error messages 2024-04-27 20:03:14 +08:00
Péter Garamvölgyi
ad5c47509f docker 2024-04-26 21:10:05 +08:00
Péter Garamvölgyi
03fdd8eb05 move files 2024-04-26 19:49:37 +08:00
Péter Garamvölgyi
421cd7e96d clean up 2024-04-26 19:30:42 +08:00
Péter Garamvölgyi
31a636a66b simplify contract address prediction 2024-04-26 18:28:22 +08:00
Péter Garamvölgyi
3c6c86eaad move configurations 2024-04-26 18:10:41 +08:00
Péter Garamvölgyi
9f7841a468 add GenerateGenesis 2024-04-26 17:52:49 +08:00
Péter Garamvölgyi
e56d9a9cff fmt 2024-04-26 16:47:46 +08:00
Péter Garamvölgyi
7b2228898b add GenerateGenesisAlloc script 2024-04-26 16:38:52 +08:00
Péter Garamvölgyi
491aa91369 use toml config file 2024-04-26 15:59:01 +08:00
Péter Garamvölgyi
6eb58c1097 rename param 2024-04-25 18:14:14 +08:00
Péter Garamvölgyi
5c42eb381b disable bytecode_hash 2024-04-25 18:12:42 +08:00
Péter Garamvölgyi
febc8a7a38 feat: add deterministic, multi-layer deployment script 2024-04-25 17:02:28 +08:00
57 changed files with 3708 additions and 282 deletions

View File

@@ -0,0 +1,41 @@
name: Docker-coordinator-api-arm64
on:
workflow_dispatch:
inputs:
tag:
description: "tag of this image (suffix -arm64 is added automatically)"
required: true
type: string
jobs:
build-and-push-arm64-image:
runs-on: ubuntu-latest
strategy:
matrix:
arch:
- aarch64
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up QEMU
run: |
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
docker buildx create --name multiarch --driver docker-container --use
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build docker image
uses: docker/build-push-action@v2
with:
platforms: linux/arm64
context: .
file: ./build/dockerfiles/coordinator-api.Dockerfile
push: true
tags: scrolltech/coordinator-api:${{inputs.tag}}-arm64

View File

@@ -361,7 +361,6 @@ jobs:
with:
context: .
file: ./build/dockerfiles/coordinator-api.Dockerfile
platforms: linux/amd64
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}

View File

@@ -12,7 +12,6 @@ update:
cd $(PWD)/common/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/coordinator/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/database/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/prover/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG}&& go mod tidy
cd $(PWD)/rollup/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/tests/integration-test/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
@@ -21,7 +20,6 @@ lint: ## The code's format and security checks.
make -C common lint
make -C coordinator lint
make -C database lint
make -C prover lint
make -C bridge-history-api lint
fmt: ## format the code
@@ -30,7 +28,6 @@ fmt: ## format the code
cd $(PWD)/common/ && go mod tidy
cd $(PWD)/coordinator/ && go mod tidy
cd $(PWD)/database/ && go mod tidy
cd $(PWD)/prover/ && go mod tidy
cd $(PWD)/rollup/ && go mod tidy
cd $(PWD)/tests/integration-test/ && go mod tidy
@@ -38,7 +35,6 @@ fmt: ## format the code
goimports -local $(PWD)/common/ -w .
goimports -local $(PWD)/coordinator/ -w .
goimports -local $(PWD)/database/ -w .
goimports -local $(PWD)/prover/ -w .
goimports -local $(PWD)/rollup/ -w .
goimports -local $(PWD)/tests/integration-test/ -w .

View File

@@ -147,7 +147,7 @@ func (b *EventUpdateLogic) updateL2WithdrawMessageInfos(ctx context.Context, bat
}
if withdrawTrie.NextMessageNonce != l2WithdrawMessages[0].MessageNonce {
log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actuall next message nonce", l2WithdrawMessages[0].MessageNonce)
log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actual next message nonce", l2WithdrawMessages[0].MessageNonce)
return fmt.Errorf("nonce mismatch")
}

View File

@@ -273,7 +273,6 @@ func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.
l1BatchEvents = append(l1BatchEvents, &orm.BatchEvent{
BatchStatus: int(btypes.BatchStatusTypeFinalized),
BatchIndex: event.BatchIndex.Uint64(),
BatchHash: event.BatchHash.String(),
L1BlockNumber: vlog.BlockNumber,
})
}

View File

@@ -73,6 +73,9 @@ func (c *BatchEvent) GetFinalizedBatchesLEBlockHeight(ctx context.Context, block
// InsertOrUpdateBatchEvents inserts a new batch event or updates an existing one based on the BatchStatusType.
func (c *BatchEvent) InsertOrUpdateBatchEvents(ctx context.Context, l1BatchEvents []*BatchEvent) error {
var maxFinalizedBatchIndex uint64
var containsFinalizedEvent bool
var maxL1BlockNumber uint64
for _, l1BatchEvent := range l1BatchEvents {
db := c.db
db = db.WithContext(ctx)
@@ -89,11 +92,13 @@ func (c *BatchEvent) InsertOrUpdateBatchEvents(ctx context.Context, l1BatchEvent
return fmt.Errorf("failed to insert or ignore batch event, error: %w", err)
}
case btypes.BatchStatusTypeFinalized:
db = db.Where("batch_index = ?", l1BatchEvent.BatchIndex)
db = db.Where("batch_hash = ?", l1BatchEvent.BatchHash)
updateFields["batch_status"] = btypes.BatchStatusTypeFinalized
if err := db.Updates(updateFields).Error; err != nil {
return fmt.Errorf("failed to update batch event, error: %w", err)
containsFinalizedEvent = true
// get the maxFinalizedBatchIndex, which signals that all batches before it are finalized
if l1BatchEvent.BatchIndex > maxFinalizedBatchIndex {
maxFinalizedBatchIndex = l1BatchEvent.BatchIndex
}
if l1BatchEvent.L1BlockNumber > maxL1BlockNumber {
maxL1BlockNumber = l1BatchEvent.L1BlockNumber
}
case btypes.BatchStatusTypeReverted:
db = db.Where("batch_index = ?", l1BatchEvent.BatchIndex)
@@ -108,6 +113,21 @@ func (c *BatchEvent) InsertOrUpdateBatchEvents(ctx context.Context, l1BatchEvent
}
}
}
if containsFinalizedEvent {
db := c.db
db = db.WithContext(ctx)
db = db.Model(&BatchEvent{})
updateFields := make(map[string]interface{})
// After darwin, a FinalizeBatch event signals that a range of batches is finalized,
// so the per-batch hash info is lost; batch_index alone is sufficient to update finalized batches.
db = db.Where("batch_index <= ?", maxFinalizedBatchIndex)
db = db.Where("batch_status != ?", btypes.BatchStatusTypeFinalized)
updateFields["batch_status"] = btypes.BatchStatusTypeFinalized
updateFields["l1_block_number"] = maxL1BlockNumber
if err := db.Updates(updateFields).Error; err != nil {
return fmt.Errorf("failed to update batch event, error: %w", err)
}
}
return nil
}
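The hunk above makes a single finalize event mark a whole range of batches finalized. A minimal Go sketch of that rule, using illustrative stand-in types rather than the orm package's real ones:

package main

import "fmt"

const batchStatusFinalized = 3 // stand-in for btypes.BatchStatusTypeFinalized

type batchEvent struct {
	BatchIndex    uint64
	BatchStatus   int
	L1BlockNumber uint64
}

// finalizeRange mirrors the GORM update above:
//   UPDATE ... SET batch_status = finalized, l1_block_number = maxL1BlockNumber
//   WHERE batch_index <= maxFinalizedBatchIndex AND batch_status != finalized
func finalizeRange(events []batchEvent, maxFinalizedBatchIndex, maxL1BlockNumber uint64) {
	for i := range events {
		if events[i].BatchIndex <= maxFinalizedBatchIndex && events[i].BatchStatus != batchStatusFinalized {
			events[i].BatchStatus = batchStatusFinalized
			events[i].L1BlockNumber = maxL1BlockNumber
		}
	}
}

func main() {
	events := []batchEvent{{BatchIndex: 1}, {BatchIndex: 2}, {BatchIndex: 3}}
	finalizeRange(events, 2, 100) // batches 1 and 2 flip to finalized; batch 3 is untouched
	fmt.Println(events)
}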

View File

@@ -31,7 +31,7 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
@@ -537,7 +537,7 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types",
"ethers-core",
@@ -1126,7 +1126,7 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"base64 0.13.1",
"ethers-core",
@@ -1283,7 +1283,7 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types",
"geth-utils",
@@ -1465,7 +1465,7 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types",
"halo2_proofs",
@@ -1488,7 +1488,7 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"env_logger 0.10.0",
"gobuild",
@@ -2237,7 +2237,7 @@ dependencies = [
[[package]]
name = "mock"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types",
"ethers-core",
@@ -2252,7 +2252,7 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types",
"halo2curves",
@@ -2724,7 +2724,7 @@ dependencies = [
[[package]]
name = "prover"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"aggregator",
"anyhow",
@@ -4361,7 +4361,7 @@ dependencies = [
[[package]]
name = "zkevm-circuits"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"array-init",
"bus-mapping",

View File

@@ -25,7 +25,7 @@ bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/i
[dependencies]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.1", default-features = false, features = ["parallel_syn", "scroll"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.0"
env_logger = "0.9.0"

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v4.4.17"
var tag = "v4.4.23"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {
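The hunk above is truncated after the ReadBuildInfo call. A typical completion of this Go pattern, sketched here rather than quoted from the repository, reads the VCS revision that the Go toolchain embeds at build time:

// package name is illustrative
package version

import "runtime/debug"

var tag = "v4.4.23"

var commit = func() string {
	if info, ok := debug.ReadBuildInfo(); ok {
		for _, s := range info.Settings {
			// "vcs.revision" is set by the Go toolchain when building inside a git checkout.
			if s.Key == "vcs.revision" && len(s.Value) >= 8 {
				return s.Value[:8] // short commit hash
			}
		}
	}
	return "unknown"
}()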

View File

@@ -0,0 +1,47 @@
# Use the latest node Debian slim base image
# This makes installing yarn dependencies much easier
FROM node:20-bookworm-slim
# Switch to bash shell
SHELL ["/bin/bash", "-c"]
WORKDIR /root
# Install dependencies
RUN apt update
RUN apt install --yes curl bash coreutils git jq
# Download and run the Foundry installation script
RUN curl -L https://foundry.paradigm.xyz | bash
# Set the environment variables to ensure Foundry tools are in the PATH
ENV PATH="/root/.foundry/bin:${PATH}"
# Run foundryup to update Foundry
RUN foundryup -v nightly-56dbd20c7179570c53b6c17ff34daa7273a4ddae
# copy dependencies
COPY ./lib /contracts/lib
COPY ./node_modules/@openzeppelin /contracts/node_modules/@openzeppelin
# copy configurations
COPY foundry.toml /contracts/foundry.toml
COPY remappings.txt /contracts/remappings.txt
# copy source code
COPY ./src /contracts/src
COPY ./scripts /contracts/scripts
# compile contracts
ENV FOUNDRY_EVM_VERSION="cancun"
ENV FOUNDRY_BYTECODE_HASH="none"
WORKDIR /contracts
RUN forge build
# copy script configs
COPY ./docker/templates/config-contracts.toml /contracts/docker/templates/config-contracts.toml
COPY ./docker/scripts/deploy.sh /contracts/docker/scripts/deploy.sh
ENTRYPOINT ["/bin/bash", "/contracts/docker/scripts/deploy.sh"]

View File

@@ -0,0 +1,53 @@
# Use the latest node Debian slim base image
# This makes installing yarn dependencies much easier
FROM node:20-bookworm-slim
# Switch to bash shell
SHELL ["/bin/bash", "-c"]
WORKDIR /root
# Install dependencies
RUN apt update
RUN apt install --yes curl bash coreutils git jq
# Download and run the Foundry installation script
RUN curl -L https://foundry.paradigm.xyz | bash
# Set the environment variables to ensure Foundry tools are in the PATH
ENV PATH="/root/.foundry/bin:${PATH}"
# Run foundryup to update Foundry
RUN foundryup -v nightly-56dbd20c7179570c53b6c17ff34daa7273a4ddae
# copy dependencies
COPY ./lib /contracts/lib
COPY ./node_modules/@openzeppelin /contracts/node_modules/@openzeppelin
# copy configurations
COPY foundry.toml /contracts/foundry.toml
COPY remappings.txt /contracts/remappings.txt
# copy source code
COPY ./src /contracts/src
COPY ./scripts /contracts/scripts
# compile contracts
ENV FOUNDRY_EVM_VERSION="cancun"
ENV FOUNDRY_BYTECODE_HASH="none"
WORKDIR /contracts
RUN forge build
# copy script configs
COPY ./docker/templates/balance-checker-config.json /contracts/docker/templates/balance-checker-config.json
COPY ./docker/templates/bridge-history-config.json /contracts/docker/templates/bridge-history-config.json
COPY ./docker/templates/chain-monitor-config.json /contracts/docker/templates/chain-monitor-config.json
COPY ./docker/templates/config-contracts.toml /contracts/docker/templates/config-contracts.toml
COPY ./docker/templates/coordinator-config.json /contracts/docker/templates/coordinator-config.json
COPY ./docker/templates/genesis.json /contracts/docker/templates/genesis.json
COPY ./docker/templates/rollup-config.json /contracts/docker/templates/rollup-config.json
COPY ./docker/scripts/gen-configs.sh /contracts/docker/scripts/gen-configs.sh
ENTRYPOINT ["/bin/bash", "/contracts/docker/scripts/gen-configs.sh"]

View File

@@ -0,0 +1,75 @@
[general]
L1_RPC_ENDPOINT = "http://l1geth:8545"
L2_RPC_ENDPOINT = "http://l2geth:8545"
CHAIN_ID_L1 = 111111
CHAIN_ID_L2 = 222222
MAX_TX_IN_CHUNK = 100
MAX_BLOCK_IN_CHUNK = 100
MAX_L1_MESSAGE_GAS_LIMIT = 10000
L1_CONTRACT_DEPLOYMENT_BLOCK = 0
TEST_ENV_MOCK_FINALIZE_ENABLED = true
TEST_ENV_MOCK_FINALIZE_TIMEOUT_SEC = 3600
[accounts]
# note: for now we simply use Anvil's dev accounts
DEPLOYER_PRIVATE_KEY = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
OWNER_PRIVATE_KEY = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
L1_COMMIT_SENDER_PRIVATE_KEY = "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
L1_FINALIZE_SENDER_PRIVATE_KEY = "0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a"
L1_GAS_ORACLE_SENDER_PRIVATE_KEY = "0x7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6"
L2_GAS_ORACLE_SENDER_PRIVATE_KEY = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
DEPLOYER_ADDR = "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"
OWNER_ADDR = "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"
L1_COMMIT_SENDER_ADDR = "0x70997970C51812dc3A010C7d01b50e0d17dc79C8"
L1_FINALIZE_SENDER_ADDR = "0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC"
L1_GAS_ORACLE_SENDER_ADDR = "0x90F79bf6EB2c4f870365E785982E1f101E93b906"
L2_GAS_ORACLE_SENDER_ADDR = "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"
L2GETH_SIGNER_0_ADDRESS = "0x756EA06BDEe36de11F22DCca45a31d8a178eF3c6"
[db]
SCROLL_DB_CONNECTION_STRING = "postgres://postgres:scroll2022@db:5432/scroll?sslmode=disable"
CHAIN_MONITOR_DB_CONNECTION_STRING = "postgres://postgres:scroll2022@db:5432/chain_monitor?sslmode=disable"
BRIDGE_HISTORY_DB_CONNECTION_STRING = "postgres://postgres:scroll2022@db:5432/bridge_history?sslmode=disable"
[genesis]
L2_MAX_ETH_SUPPLY = "226156424291633194186662080095093570025917938800079226639565593765455331328"
L2_DEPLOYER_INITIAL_BALANCE = 1000000000000000000
[contracts]
DEPLOYMENT_SALT = ""
# contracts deployed outside this script
L1_FEE_VAULT_ADDR = "0x0000000000000000000000000000000000000001"
L1_PLONK_VERIFIER_ADDR = "0x0000000000000000000000000000000000000001"
[contracts.overrides]
# L1_WETH = "0xfFf9976782d46CC05630D1f6eBAb18b2324d6B14"
L2_MESSAGE_QUEUE = "0x5300000000000000000000000000000000000000"
L1_GAS_PRICE_ORACLE = "0x5300000000000000000000000000000000000002"
L2_WHITELIST = "0x5300000000000000000000000000000000000003"
L2_WETH = "0x5300000000000000000000000000000000000004"
L2_TX_FEE_VAULT = "0x5300000000000000000000000000000000000005"
[coordinator]
COORDINATOR_JWT_SECRET_KEY = "e788b62d39254928a821ac1c76b274a8c835aa1e20ecfb6f50eb10e87847de44"

View File

@@ -0,0 +1,26 @@
#!/bin/bash
latest_commit=$(git log -1 --pretty=format:%h)
tag=${latest_commit:0:8}
echo "Using Docker image tag: $tag"
echo ""
docker build -f docker/Dockerfile.gen-configs -t scrolltech/scroll-stack-contracts:gen-configs-$tag-amd64 --platform linux/amd64 .
echo
echo "built scrolltech/scroll-stack-contracts:gen-configs-$tag-amd64"
echo
docker build -f docker/Dockerfile.gen-configs -t scrolltech/scroll-stack-contracts:gen-configs-$tag-arm64 --platform linux/arm64 .
echo
echo "built scrolltech/scroll-stack-contracts:gen-configs-$tag-arm64"
echo
docker build -f docker/Dockerfile.deploy -t scrolltech/scroll-stack-contracts:deploy-$tag-amd64 --platform linux/amd64 .
echo
echo "built scrolltech/scroll-stack-contracts:deploy-$tag-amd64"
echo
docker build -f docker/Dockerfile.deploy -t scrolltech/scroll-stack-contracts:deploy-$tag-arm64 --platform linux/arm64 .
echo
echo "built scrolltech/scroll-stack-contracts:deploy-$tag-arm64"
echo

View File

@@ -0,0 +1,46 @@
#!/bin/sh
export FOUNDRY_EVM_VERSION="cancun"
export FOUNDRY_BYTECODE_HASH="none"
if [ "${L1_RPC_ENDPOINT}" = "" ]; then
echo "L1_RPC_ENDPOINT is not set"
L1_RPC_ENDPOINT="http://host.docker.internal:8543"
fi
if [ "$L2_RPC_ENDPOINT" = "" ]; then
echo "L2_RPC_ENDPOINT is not set"
L2_RPC_ENDPOINT="http://host.docker.internal:8545"
fi
if [ "${L1_RPC_ENDPOINT}" = "" ]; then
echo "L1_RPC_ENDPOINT is not set"
L1_RPC_ENDPOINT="http://host.docker.internal:8543"
fi
if [ "${BATCH_SIZE}" = "" ]; then
BATCH_SIZE="100"
fi
echo "using L1_RPC_ENDPOINT = $L1_RPC_ENDPOINT"
echo "using L2_RPC_ENDPOINT = $L2_RPC_ENDPOINT"
# simulate L1
echo ""
echo "simulating on L1"
forge script scripts/foundry/DeployScroll.s.sol:DeployScroll --rpc-url "$L1_RPC_ENDPOINT" --sig "run(string,string)" "L1" "verify-config" || exit 1
# simulate L2
echo ""
echo "simulating on L2"
forge script scripts/foundry/DeployScroll.s.sol:DeployScroll --rpc-url "$L2_RPC_ENDPOINT" --sig "run(string,string)" "L2" "verify-config" --legacy || exit 1
# deploy L1
echo ""
echo "deploying on L1"
forge script scripts/foundry/DeployScroll.s.sol:DeployScroll --rpc-url "$L1_RPC_ENDPOINT" --batch-size "$BATCH_SIZE" --sig "run(string,string)" "L1" "verify-config" --broadcast || exit 1
# deploy L2
echo ""
echo "deploying on L2"
forge script scripts/foundry/DeployScroll.s.sol:DeployScroll --rpc-url "$L2_RPC_ENDPOINT" --batch-size "$BATCH_SIZE" --sig "run(string,string)" "L2" "verify-config" --broadcast --legacy || exit 1

View File

@@ -0,0 +1,33 @@
#!/bin/bash
echo ""
echo "generating config-contracts.toml"
forge script scripts/foundry/DeployScroll.s.sol:DeployScroll --sig "run(string,string)" "none" "write-config" || exit 1
echo ""
echo "generating genesis.json"
forge script scripts/foundry/DeployScroll.s.sol:GenerateGenesis || exit 1
echo ""
echo "generating rollup-config.json"
forge script scripts/foundry/DeployScroll.s.sol:GenerateRollupConfig || exit 1
echo ""
echo "generating coordinator-config.json"
forge script scripts/foundry/DeployScroll.s.sol:GenerateCoordinatorConfig || exit 1
echo ""
echo "generating chain-monitor-config.json"
forge script scripts/foundry/DeployScroll.s.sol:GenerateChainMonitorConfig || exit 1
echo ""
echo "generating bridge-history-config.json"
forge script scripts/foundry/DeployScroll.s.sol:GenerateBridgeHistoryConfig || exit 1
echo ""
echo "generating balance-checker-config.json"
forge script scripts/foundry/DeployScroll.s.sol:GenerateBalanceCheckerConfig || exit 1
echo ""
echo "generating .env.frontend"
forge script scripts/foundry/DeployScroll.s.sol:GenerateFrontendConfig || exit 1

View File

@@ -0,0 +1,24 @@
#!/bin/bash
latest_commit=$(git log -1 --pretty=format:%h)
tag=${latest_commit:0:8}
echo "Using Docker image tag: $tag"
echo ""
docker push scrolltech/scroll-stack-contracts:gen-configs-$tag-amd64
docker push scrolltech/scroll-stack-contracts:gen-configs-$tag-arm64
docker manifest create scrolltech/scroll-stack-contracts:gen-configs-$tag \
--amend scrolltech/scroll-stack-contracts:gen-configs-$tag-amd64 \
--amend scrolltech/scroll-stack-contracts:gen-configs-$tag-arm64
docker manifest push scrolltech/scroll-stack-contracts:gen-configs-$tag
docker push scrolltech/scroll-stack-contracts:deploy-$tag-amd64
docker push scrolltech/scroll-stack-contracts:deploy-$tag-arm64
docker manifest create scrolltech/scroll-stack-contracts:deploy-$tag \
--amend scrolltech/scroll-stack-contracts:deploy-$tag-amd64 \
--amend scrolltech/scroll-stack-contracts:deploy-$tag-arm64
docker manifest push scrolltech/scroll-stack-contracts:deploy-$tag

View File

@@ -0,0 +1,42 @@
{
"addresses": [
{
"rpc_url": "${SCROLL_L1_RPC}",
"min_balance_ether": "10",
"address": "${L1_COMMIT_SENDER_ADDRESS}",
"name": "L1_COMMIT_SENDER"
},
{
"rpc_url": "${SCROLL_L1_RPC}",
"min_balance_ether": "10",
"address": "${L1_FINALIZE_SENDER_ADDRESS}",
"name": "L1_FINALIZE_SENDER"
},
{
"rpc_url": "${SCROLL_L1_RPC}",
"min_balance_ether": "1.1",
"address": "${L1_GAS_ORACLE_SENDER_ADDRESS}",
"name": "L1_GAS_ORACLE_SENDER"
},
{
"rpc_url": "${SCROLL_L1_RPC}",
"min_balance_ether": "0",
"address": "${L1_SCROLL_FEE_VAULT_ADDRESS}",
"name": "L1_SCROLL_FEE_VAULT"
},
{
"rpc_url": "${SCROLL_L2_RPC}",
"min_balance_ether": "1.1",
"address": "${L2_GAS_ORACLE_SENDER_ADDRESS}",
"name": "L2_GAS_ORACLE_SENDER"
},
{
"rpc_url": "${SCROLL_L2_RPC}",
"min_balance_ether": "0",
"address": "${L2_TX_FEE_VAULT_ADDR}",
"name": "L2_TX_FEE_VAULT"
}
],
"JOB_INTERVAL_SECS": 60,
"BIND_PORT": 8080
}
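The template above leaves ${VAR} placeholders for RPC URLs and addresses. One way to render such a template is plain environment substitution; the sketch below is an assumption about tooling, not necessarily what the stack's scripts do:

package main

import (
	"fmt"
	"os"
)

func main() {
	// Illustrative value; in the stack these come from the deployment environment.
	os.Setenv("SCROLL_L1_RPC", "http://l1geth:8545")
	tmpl, err := os.ReadFile("docker/templates/balance-checker-config.json")
	if err != nil {
		panic(err)
	}
	// os.ExpandEnv replaces ${SCROLL_L1_RPC} and the other placeholders in one pass.
	fmt.Println(os.ExpandEnv(string(tmpl)))
}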

View File

@@ -0,0 +1,56 @@
{
"L1": {
"confirmation": 0,
"endpoint": null,
"startHeight": 0,
"blockTime": 12,
"fetchLimit": 16,
"MessageQueueAddr": null,
"MessengerAddr": null,
"ScrollChainAddr": null,
"GatewayRouterAddr": null,
"ETHGatewayAddr": null,
"WETHGatewayAddr": null,
"StandardERC20GatewayAddr": null,
"CustomERC20GatewayAddr": null,
"ERC721GatewayAddr": null,
"ERC1155GatewayAddr": null,
"USDCGatewayAddr": "0x0000000000000000000000000000000000000000",
"LIDOGatewayAddr": "0x0000000000000000000000000000000000000000",
"DAIGatewayAddr": "0x0000000000000000000000000000000000000000",
"PufferGatewayAddr": "0x0000000000000000000000000000000000000000"
},
"L2": {
"confirmation": 0,
"endpoint": null,
"blockTime": 3,
"fetchLimit": 64,
"MessageQueueAddr": null,
"MessengerAddr": null,
"GatewayRouterAddr": null,
"ETHGatewayAddr": null,
"WETHGatewayAddr": null,
"StandardERC20GatewayAddr": null,
"CustomERC20GatewayAddr": null,
"ERC721GatewayAddr": null,
"ERC1155GatewayAddr": null,
"USDCGatewayAddr": "0x0000000000000000000000000000000000000000",
"LIDOGatewayAddr": "0x0000000000000000000000000000000000000000",
"DAIGatewayAddr": "0x0000000000000000000000000000000000000000",
"PufferGatewayAddr": "0x0000000000000000000000000000000000000000"
},
"db": {
"dsn": null,
"driverName": "postgres",
"maxOpenNum": 200,
"maxIdleNum": 20
},
"redis": {
"address": "localhost:6379",
"username": "default",
"password": "",
"local": true,
"minIdleConns": 10,
"readTimeoutMs": 500
}
}
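The db block's maxOpenNum and maxIdleNum are standard connection-pool limits. A hedged sketch of how such values map onto Go's database/sql (illustrative wiring, not the service's actual code; the lib/pq import is an assumption matching driverName "postgres"):

package main

import (
	"database/sql"
	"log"

	_ "github.com/lib/pq" // assumed postgres driver
)

func main() {
	dsn := "postgres://postgres:scroll2022@db:5432/bridge_history?sslmode=disable"
	db, err := sql.Open("postgres", dsn)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	db.SetMaxOpenConns(200) // maxOpenNum
	db.SetMaxIdleConns(20)  // maxIdleNum
}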

View File

@@ -0,0 +1,56 @@
{
"l1_config": {
"l1_url": null,
"confirm": "0x20",
"start_number": null,
"l1_contracts": {
"l1_gateways": {
"eth_gateway": null,
"weth_gateway": null,
"standard_erc20_gateway": null,
"custom_erc20_gateway": null,
"erc721_gateway": null,
"erc1155_gateway": null,
"dai_gateway": "0x0000000000000000000000000000000000000000",
"usdc_gateway": "0x0000000000000000000000000000000000000000",
"lido_gateway": "0x0000000000000000000000000000000000000000",
"puffer_gateway": "0x0000000000000000000000000000000000000000"
},
"scroll_messenger": null,
"message_queue": null,
"scroll_chain": null
},
"start_messenger_balance": null
},
"l2_config": {
"l2_url": null,
"confirm": "0x80",
"l2_contracts": {
"l2_gateways": {
"eth_gateway": null,
"weth_gateway": null,
"standard_erc20_gateway": null,
"custom_erc20_gateway": null,
"erc721_gateway": null,
"erc1155_gateway": null,
"dai_gateway": "0x0000000000000000000000000000000000000000",
"usdc_gateway": "0x0000000000000000000000000000000000000000",
"lido_gateway": "0x0000000000000000000000000000000000000000",
"puffer_gateway": "0x0000000000000000000000000000000000000000"
},
"scroll_messenger": null,
"message_queue": null
}
},
"slack_webhook_config": {
"webhook_url": "http://localhost:1234",
"worker_count": 5,
"worker_buffer_size": 1000
},
"db_config": {
"driver_name": "postgres",
"dsn": null,
"maxOpenNum": 100,
"maxIdleNum": 20
}
}

View File

@@ -0,0 +1,55 @@
L1_WETH_ADDR = ""
L1_PROXY_ADMIN_ADDR = ""
L1_PROXY_IMPLEMENTATION_PLACEHOLDER_ADDR = ""
L1_WHITELIST_ADDR = ""
L2_GAS_PRICE_ORACLE_IMPLEMENTATION_ADDR = ""
L2_GAS_PRICE_ORACLE_PROXY_ADDR = ""
L1_SCROLL_CHAIN_PROXY_ADDR = ""
L1_SCROLL_MESSENGER_PROXY_ADDR = ""
L1_ENFORCED_TX_GATEWAY_IMPLEMENTATION_ADDR = ""
L1_ENFORCED_TX_GATEWAY_PROXY_ADDR = ""
L1_ZKEVM_VERIFIER_V1_ADDR = ""
L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR = ""
L1_MESSAGE_QUEUE_IMPLEMENTATION_ADDR = ""
L1_MESSAGE_QUEUE_PROXY_ADDR = ""
L1_SCROLL_CHAIN_IMPLEMENTATION_ADDR = ""
L1_GATEWAY_ROUTER_IMPLEMENTATION_ADDR = ""
L1_GATEWAY_ROUTER_PROXY_ADDR = ""
L1_ETH_GATEWAY_PROXY_ADDR = ""
L1_WETH_GATEWAY_PROXY_ADDR = ""
L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR = ""
L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR = ""
L1_ERC721_GATEWAY_PROXY_ADDR = ""
L1_ERC1155_GATEWAY_PROXY_ADDR = ""
L2_MESSAGE_QUEUE_ADDR = ""
L1_GAS_PRICE_ORACLE_ADDR = ""
L2_WHITELIST_ADDR = ""
L2_WETH_ADDR = ""
L2_TX_FEE_VAULT_ADDR = ""
L2_PROXY_ADMIN_ADDR = ""
L2_PROXY_IMPLEMENTATION_PLACEHOLDER_ADDR = ""
L2_SCROLL_MESSENGER_PROXY_ADDR = ""
L2_ETH_GATEWAY_PROXY_ADDR = ""
L2_WETH_GATEWAY_PROXY_ADDR = ""
L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR = ""
L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR = ""
L2_ERC721_GATEWAY_PROXY_ADDR = ""
L2_ERC1155_GATEWAY_PROXY_ADDR = ""
L2_SCROLL_STANDARD_ERC20_ADDR = ""
L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR = ""
L1_SCROLL_MESSENGER_IMPLEMENTATION_ADDR = ""
L1_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR = ""
L1_ETH_GATEWAY_IMPLEMENTATION_ADDR = ""
L1_WETH_GATEWAY_IMPLEMENTATION_ADDR = ""
L1_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR = ""
L1_ERC721_GATEWAY_IMPLEMENTATION_ADDR = ""
L1_ERC1155_GATEWAY_IMPLEMENTATION_ADDR = ""
L2_SCROLL_MESSENGER_IMPLEMENTATION_ADDR = ""
L2_GATEWAY_ROUTER_IMPLEMENTATION_ADDR = ""
L2_GATEWAY_ROUTER_PROXY_ADDR = ""
L2_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR = ""
L2_ETH_GATEWAY_IMPLEMENTATION_ADDR = ""
L2_WETH_GATEWAY_IMPLEMENTATION_ADDR = ""
L2_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR = ""
L2_ERC721_GATEWAY_IMPLEMENTATION_ADDR = ""
L2_ERC1155_GATEWAY_IMPLEMENTATION_ADDR = ""

View File

@@ -0,0 +1,30 @@
{
"prover_manager": {
"provers_per_session": 1,
"session_attempts": 100,
"chunk_collection_time_sec": 3600,
"batch_collection_time_sec": 600,
"verifier": {
"fork_name": "bernoulli",
"mock_mode": false,
"params_path": "/verifier/params",
"assets_path": "/verifier/assets"
},
"max_verifier_workers": 4,
"min_prover_version": "v4.3.41"
},
"db": {
"driver_name": "postgres",
"dsn": null,
"maxOpenNum": 200,
"maxIdleNum": 20
},
"l2": {
"chain_id": null
},
"auth": {
"secret": null,
"challenge_expire_duration_sec": 10,
"login_expire_duration_sec": 3600
}
}
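The auth block sets challenge and login token lifetimes. As a hedged illustration of what login_expire_duration_sec = 3600 means for an HS256 JWT (the library and claim layout are assumptions, not the coordinator's actual code):

package main

import (
	"fmt"
	"time"

	"github.com/golang-jwt/jwt/v5" // assumed JWT library
)

func main() {
	// COORDINATOR_JWT_SECRET_KEY from config-contracts.toml
	secret := []byte("e788b62d39254928a821ac1c76b274a8c835aa1e20ecfb6f50eb10e87847de44")
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
		"exp": time.Now().Add(3600 * time.Second).Unix(), // login_expire_duration_sec
	})
	signed, err := token.SignedString(secret)
	if err != nil {
		panic(err)
	}
	fmt.Println(signed)
}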

View File

@@ -0,0 +1,48 @@
{
"config": {
"chainId": null,
"homesteadBlock": 0,
"eip150Block": 0,
"eip150Hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"eip155Block": 0,
"eip158Block": 0,
"byzantiumBlock": 0,
"constantinopleBlock": 0,
"petersburgBlock": 0,
"istanbulBlock": 0,
"berlinBlock": 0,
"londonBlock": 0,
"archimedesBlock": 0,
"shanghaiBlock": 0,
"bernoulliBlock": 0,
"curieBlock": 0,
"clique": {
"period": 3,
"epoch": 30000
},
"scroll": {
"useZktrie": true,
"maxTxPerBlock": null,
"maxTxPayloadBytesPerBlock": 122880,
"feeVaultAddress": null,
"l1Config": {
"l1ChainId": null,
"l1MessageQueueAddress": null,
"scrollChainAddress": null,
"numL1MessagesPerBlock": "10"
}
}
},
"nonce": "0x0",
"timestamp": null,
"extraData": null,
"gasLimit": "10000000",
"difficulty": "0x1",
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"coinbase": "0x0000000000000000000000000000000000000000",
"alloc": {},
"number": "0x0",
"gasUsed": "0x0",
"parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"baseFeePerGas": null
}
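One commit in this range is "ensure l1ChainId is serialized as string". In Go, the json ",string" tag option gives exactly that guarantee; a minimal sketch with illustrative type names, not the repository's actual structs:

package main

import (
	"encoding/json"
	"fmt"
)

type l1Config struct {
	// The ",string" option makes encoding/json emit "111111" rather than 111111.
	L1ChainID uint64 `json:"l1ChainId,string"`
}

func main() {
	out, err := json.Marshal(l1Config{L1ChainID: 111111}) // CHAIN_ID_L1 from config-contracts.toml
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"l1ChainId":"111111"}
}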

View File

@@ -0,0 +1,87 @@
{
"l1_config": {
"confirmations": "0x0",
"endpoint": null,
"l1_message_queue_address": null,
"scroll_chain_address": null,
"start_height": 0,
"relayer_config": {
"gas_price_oracle_contract_address": null,
"sender_config": {
"endpoint": null,
"escalate_blocks": 100,
"escalate_multiple_num": 11,
"escalate_multiple_den": 10,
"max_gas_price": 10000000000000,
"tx_type": "LegacyTx",
"check_pending_time": 3,
"confirmations": "0x0"
},
"gas_oracle_config": {
"min_gas_price": 0,
"gas_price_diff": 50000,
"l1_base_fee_weight": 0.086,
"l1_blob_base_fee_weight": 0.030
},
"gas_oracle_sender_private_key": null
}
},
"l2_config": {
"confirmations": "0x0",
"endpoint": null,
"l2_message_queue_address": null,
"relayer_config": {
"rollup_contract_address": null,
"gas_price_oracle_contract_address": null,
"sender_config": {
"endpoint": null,
"escalate_blocks": 4,
"escalate_multiple_num": 12,
"escalate_multiple_den": 10,
"max_gas_price": 200000000000,
"max_blob_gas_price": 200000000000,
"tx_type": "DynamicFeeTx",
"check_pending_time": 10,
"confirmations": "0x0"
},
"gas_oracle_config": {
"min_gas_price": 0,
"gas_price_diff": 50000
},
"chain_monitor": {
"enabled": true,
"timeout": 3,
"try_times": 5,
"base_url": "http://chain-monitorv2:8080"
},
"enable_test_env_bypass_features": null,
"finalize_batch_without_proof_timeout_sec": null,
"gas_oracle_sender_private_key": null,
"commit_sender_private_key": null,
"finalize_sender_private_key": null,
"l1_commit_gas_limit_multiplier": 1.2
},
"chunk_proposer_config": {
"max_block_num_per_chunk": null,
"max_tx_num_per_chunk": null,
"max_l1_commit_gas_per_chunk": 5000000,
"max_l1_commit_calldata_size_per_chunk": 110000,
"chunk_timeout_sec": 2700,
"max_row_consumption_per_chunk": 1000000,
"gas_cost_increase_multiplier": 1.2
},
"batch_proposer_config": {
"max_chunk_num_per_batch": 15,
"max_l1_commit_gas_per_batch": 5000000,
"max_l1_commit_calldata_size_per_batch": 110000,
"batch_timeout_sec": 2700,
"gas_cost_increase_multiplier": 1.2
}
},
"db_config": {
"driver_name": "postgres",
"dsn": null,
"maxOpenNum": 50,
"maxIdleNum": 20
}
}
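The sender_config escalation knobs suggest multiplicative gas-price bumping for stuck transactions. A hedged sketch of that arithmetic (an assumption about the semantics, not the rollup-relayer's actual algorithm):

package main

import "fmt"

// escalate bumps a pending transaction's gas price by num/den, capped at maxGasPrice;
// escalate_multiple_num = 11 over escalate_multiple_den = 10 is a 10% bump, applied
// after escalate_blocks blocks without inclusion.
func escalate(gasPrice, num, den, maxGasPrice uint64) uint64 {
	bumped := gasPrice * num / den
	if bumped > maxGasPrice {
		return maxGasPrice
	}
	return bumped
}

func main() {
	fmt.Println(escalate(1_000_000_000, 11, 10, 10_000_000_000_000)) // 1100000000
}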

View File

@@ -38,7 +38,7 @@ Mapping from L2 ERC20 token address to corresponding L2ERC20Gateway.
function defaultERC20Gateway() external view returns (address)
```
The addess of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.
The address of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.

View File

@@ -83,45 +83,6 @@ function committedBatches(uint256) external view returns (bytes32)
|---|---|---|
| _0 | bytes32 | The batch hash of a committed batch. |
### finalizeBatch
```solidity
function finalizeBatch(bytes _batchHeader, bytes32 _prevStateRoot, bytes32 _postStateRoot, bytes32 _withdrawRoot) external nonpayable
```
Finalize a committed batch on layer 1 without providing proof.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _batchHeader | bytes | undefined |
| _prevStateRoot | bytes32 | undefined |
| _postStateRoot | bytes32 | undefined |
| _withdrawRoot | bytes32 | undefined |
### finalizeBatch4844
```solidity
function finalizeBatch4844(bytes _batchHeader, bytes32 _prevStateRoot, bytes32 _postStateRoot, bytes32 _withdrawRoot, bytes _blobDataProof) external nonpayable
```
Finalize a committed batch (with blob) on layer 1 without providing proof.
*Memory layout of `_blobDataProof`:*
```text
| z       | y       | kzg_commitment | kzg_proof |
|---------|---------|----------------|-----------|
| bytes32 | bytes32 | bytes48        | bytes48   |
```
#### Parameters
| Name | Type | Description |
|---|---|---|
| _batchHeader | bytes | undefined |
| _prevStateRoot | bytes32 | undefined |
| _postStateRoot | bytes32 | undefined |
| _withdrawRoot | bytes32 | undefined |
| _blobDataProof | bytes | undefined |
### finalizeBatchWithProof
```solidity
@@ -984,7 +945,7 @@ error ErrorRevertFinalizedBatch()
*Thrown when reverting a finialized batch.*
*Thrown when reverting a finalized batch.*
### ErrorRevertNotStartFromEnd
@@ -995,7 +956,7 @@ error ErrorRevertNotStartFromEnd()
*Thrown when the reverted batches are not in the ending of commited batch chain.*
*Thrown when the reverted batches are not in the ending of committed batch chain.*
### ErrorRevertZeroBatches

View File

@@ -7,15 +7,13 @@ libs = [] # a list of library directories
remappings = [] # a list of remappings
libraries = [] # a list of deployed libraries to link against
cache = true # whether to cache builds or not
force = true # whether to ignore the cache (clean build)
force = false # whether to ignore the cache (clean build)
evm_version = 'cancun' # the evm version (by hardfork name)
solc_version = '0.8.24' # override for the solc version (setting this ignores `auto_detect_solc`)
optimizer = true # enable or disable the solc optimizer
optimizer_runs = 200 # the number of optimizer runs
verbosity = 2 # the verbosity of tests
ignored_error_codes = [] # a list of ignored solc error codes
fuzz_runs = 256 # the number of fuzz runs for tests
ffi = false # whether to enable ffi or not
sender = '0x00a329c0648769a73afac7f9381e08fb43dbea72' # the address of `msg.sender` in tests
tx_origin = '0x00a329c0648769a73afac7f9381e08fb43dbea72' # the address of `tx.origin` in tests
initial_balance = '0xffffffffffffffffffffffff' # the initial balance of the test contract
@@ -27,4 +25,15 @@ block_coinbase = '0x0000000000000000000000000000000000000000' # the address of `
block_timestamp = 0 # the value of `block.timestamp` in tests
block_difficulty = 0 # the value of `block.difficulty` in tests
# remove bytecode hash for reliable deterministic addresses
bytecode_hash = 'none'
# file system permissions
ffi = true
fs_permissions = [
{ access='read', path='./docker' },
{ access='read-write', path='./volume' },
]
gas_reports = ["L2GasPriceOracle"]

File diff suppressed because it is too large

View File

@@ -44,7 +44,7 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
struct ReplayState {
// The number of replayed times.
uint128 times;
// The queue index of lastest replayed one. If it is zero, it means the message has not been replayed.
// The queue index of latest replayed one. If it is zero, it means the message has not been replayed.
uint128 lastIndex;
}

View File

@@ -167,7 +167,7 @@ abstract contract L1ERC20Gateway is IL1ERC20Gateway, IMessageDropCallback, Scrol
/// @dev Internal function to do all the deposit operations.
///
/// @param _token The token to deposit.
/// @param _to The recipient address to recieve the token in L2.
/// @param _to The recipient address to receive the token in L2.
/// @param _amount The amount of token to deposit.
/// @param _data Optional data to forward to recipient's account.
/// @param _gasLimit Gas limit required to complete the deposit on L2.

View File

@@ -25,7 +25,7 @@ contract L1GatewayRouter is OwnableUpgradeable, IL1GatewayRouter {
/// @notice The address of L1ETHGateway.
address public ethGateway;
/// @notice The addess of default ERC20 gateway, normally the L1StandardERC20Gateway contract.
/// @notice The address of default ERC20 gateway, normally the L1StandardERC20Gateway contract.
address public defaultERC20Gateway;
/// @notice Mapping from ERC20 token address to corresponding L1ERC20Gateway.

View File

@@ -97,7 +97,7 @@ contract L1StandardERC20Gateway is L1ERC20Gateway {
/// @inheritdoc IL1ERC20Gateway
function getL2ERC20Address(address _l1Token) public view override returns (address) {
// In StandardERC20Gateway, all corresponding l2 tokens are depoyed by Create2 with salt,
// In StandardERC20Gateway, all corresponding l2 tokens are deployed by Create2 with salt,
// we can calculate the l2 address directly.
bytes32 _salt = keccak256(abi.encodePacked(counterpart, keccak256(abi.encodePacked(_l1Token))));

View File

@@ -122,36 +122,4 @@ interface IScrollChain {
bytes calldata blobDataProof,
bytes calldata aggrProof
) external;
/// @notice Finalize a committed batch on layer 1 without providing proof.
/// @param batchHeader The header of current batch, see the encoding in comments of `commitBatch`.
/// @param prevStateRoot The state root of parent batch.
/// @param postStateRoot The state root of current batch.
/// @param withdrawRoot The withdraw trie root of current batch.
function finalizeBatch(
bytes calldata batchHeader,
bytes32 prevStateRoot,
bytes32 postStateRoot,
bytes32 withdrawRoot
) external;
/// @notice Finalize a committed batch (with blob) on layer 1 without providing proof.
///
/// @dev Memory layout of `blobDataProof`:
/// | z | y | kzg_commitment | kzg_proof |
/// |---------|---------|----------------|-----------|
/// | bytes32 | bytes32 | bytes48 | bytes48 |
///
/// @param batchHeader The header of current batch, see the encoding in comments of `commitBatch`.
/// @param prevStateRoot The state root of parent batch.
/// @param postStateRoot The state root of current batch.
/// @param withdrawRoot The withdraw trie root of current batch.
/// @param blobDataProof The proof for blob data.
function finalizeBatch4844(
bytes calldata batchHeader,
bytes32 prevStateRoot,
bytes32 postStateRoot,
bytes32 withdrawRoot,
bytes calldata blobDataProof
) external;
}

View File

@@ -89,10 +89,10 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
/// @dev Thrown when the number of batches to revert is zero.
error ErrorRevertZeroBatches();
/// @dev Thrown when the reverted batches are not in the ending of commited batch chain.
/// @dev Thrown when the reverted batches are not in the ending of committed batch chain.
error ErrorRevertNotStartFromEnd();
/// @dev Thrown when reverting a finialized batch.
/// @dev Thrown when reverting a finalized batch.
error ErrorRevertFinalizedBatch();
/// @dev Thrown when the given state root is zero.
@@ -112,11 +112,11 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
*************/
/// @dev Address of the point evaluation precompile used for EIP-4844 blob verification.
address private constant POINT_EVALUATION_PRECOMPILE_ADDR = address(0x0A);
address internal constant POINT_EVALUATION_PRECOMPILE_ADDR = address(0x0A);
/// @dev BLS Modulus value defined in EIP-4844 and the magic value returned from a successful call to the
/// point evaluation precompile
uint256 private constant BLS_MODULUS =
uint256 internal constant BLS_MODULUS =
52435875175126190479447740508185965837690552500527637822603658699938581184513;
/// @notice The chain id of the corresponding layer 2 chain.
@@ -509,104 +509,6 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
emit FinalizeBatch(_batchIndex, _batchHash, _postStateRoot, _withdrawRoot);
}
/// @inheritdoc IScrollChain
function finalizeBatch(
bytes calldata _batchHeader,
bytes32 _prevStateRoot,
bytes32 _postStateRoot,
bytes32 _withdrawRoot
) external override OnlyProver whenNotPaused {
require(_prevStateRoot != bytes32(0), "previous state root is zero");
require(_postStateRoot != bytes32(0), "new state root is zero");
// compute batch hash and verify
(uint256 memPtr, bytes32 _batchHash, uint256 _batchIndex, ) = _loadBatchHeader(_batchHeader);
// verify previous state root.
require(finalizedStateRoots[_batchIndex - 1] == _prevStateRoot, "incorrect previous state root");
// avoid duplicated verification
require(finalizedStateRoots[_batchIndex] == bytes32(0), "batch already verified");
// check and update lastFinalizedBatchIndex
unchecked {
require(lastFinalizedBatchIndex + 1 == _batchIndex, "incorrect batch index");
lastFinalizedBatchIndex = _batchIndex;
}
// record state root and withdraw root
finalizedStateRoots[_batchIndex] = _postStateRoot;
withdrawRoots[_batchIndex] = _withdrawRoot;
// Pop finalized and non-skipped message from L1MessageQueue.
_popL1Messages(
BatchHeaderV0Codec.getSkippedBitmapPtr(memPtr),
BatchHeaderV0Codec.getTotalL1MessagePopped(memPtr),
BatchHeaderV0Codec.getL1MessagePopped(memPtr)
);
emit FinalizeBatch(_batchIndex, _batchHash, _postStateRoot, _withdrawRoot);
}
/// @inheritdoc IScrollChain
/// @dev Memory layout of `_blobDataProof`:
/// ```text
/// | z | y | kzg_commitment | kzg_proof |
/// |---------|---------|----------------|-----------|
/// | bytes32 | bytes32 | bytes48 | bytes48 |
/// ```
function finalizeBatch4844(
bytes calldata _batchHeader,
bytes32 _prevStateRoot,
bytes32 _postStateRoot,
bytes32 _withdrawRoot,
bytes calldata _blobDataProof
) external override OnlyProver whenNotPaused {
if (_prevStateRoot == bytes32(0)) revert ErrorPreviousStateRootIsZero();
if (_postStateRoot == bytes32(0)) revert ErrorStateRootIsZero();
// compute batch hash and verify
(uint256 memPtr, bytes32 _batchHash, uint256 _batchIndex, ) = _loadBatchHeader(_batchHeader);
bytes32 _blobVersionedHash = BatchHeaderV1Codec.getBlobVersionedHash(memPtr);
// Calls the point evaluation precompile and verifies the output
{
(bool success, bytes memory data) = POINT_EVALUATION_PRECOMPILE_ADDR.staticcall(
abi.encodePacked(_blobVersionedHash, _blobDataProof)
);
// We verify that the point evaluation precompile call was successful by testing that the last 32 bytes of the
// response are equal to BLS_MODULUS as defined in https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
if (!success) revert ErrorCallPointEvaluationPrecompileFailed();
(, uint256 result) = abi.decode(data, (uint256, uint256));
if (result != BLS_MODULUS) revert ErrorUnexpectedPointEvaluationPrecompileOutput();
}
// verify previous state root.
if (finalizedStateRoots[_batchIndex - 1] != _prevStateRoot) revert ErrorIncorrectPreviousStateRoot();
// avoid duplicated verification
if (finalizedStateRoots[_batchIndex] != bytes32(0)) revert ErrorBatchIsAlreadyVerified();
// check and update lastFinalizedBatchIndex
unchecked {
if (lastFinalizedBatchIndex + 1 != _batchIndex) revert ErrorIncorrectBatchIndex();
lastFinalizedBatchIndex = _batchIndex;
}
// record state root and withdraw root
finalizedStateRoots[_batchIndex] = _postStateRoot;
withdrawRoots[_batchIndex] = _withdrawRoot;
// Pop finalized and non-skipped message from L1MessageQueue.
_popL1Messages(
BatchHeaderV1Codec.getSkippedBitmapPtr(memPtr),
BatchHeaderV1Codec.getTotalL1MessagePopped(memPtr),
BatchHeaderV1Codec.getL1MessagePopped(memPtr)
);
emit FinalizeBatch(_batchIndex, _batchHash, _postStateRoot, _withdrawRoot);
}
/************************
* Restricted Functions *
************************/
@@ -678,7 +580,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
/// @param _chunks The list of chunks to commit.
/// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not.
/// @return _batchDataHash The computed data hash for the list of chunks.
/// @return _totalL1MessagesPoppedInBatch The total number of L1 messages poped in this batch, including skipped one.
/// @return _totalL1MessagesPoppedInBatch The total number of L1 messages popped in this batch, including skipped one.
function _commitChunksV0(
uint256 _totalL1MessagesPoppedOverall,
bytes[] memory _chunks,
@@ -725,7 +627,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
/// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not.
/// @return _blobVersionedHash The blob versioned hash for the blob carried in this transaction.
/// @return _batchDataHash The computed data hash for the list of chunks.
/// @return _totalL1MessagesPoppedInBatch The total number of L1 messages poped in this batch, including skipped one.
/// @return _totalL1MessagesPoppedInBatch The total number of L1 messages popped in this batch, including skipped one.
function _commitChunksV1(
uint256 _totalL1MessagesPoppedOverall,
bytes[] memory _chunks,
@@ -1048,7 +950,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
/// @dev Internal function to pop finalized l1 messages.
/// @param bitmapPtr The memory offset of `skippedL1MessageBitmap`.
/// @param totalL1MessagePopped The total number of L1 messages poped in all batches including current batch.
/// @param totalL1MessagePopped The total number of L1 messages popped in all batches including current batch.
/// @param l1MessagePopped The number of L1 messages popped in current batch.
function _popL1Messages(
uint256 bitmapPtr,

View File

@@ -58,7 +58,11 @@ contract L2CustomERC20Gateway is L2ERC20Gateway {
/// @param _counterpart The address of `L1CustomERC20Gateway` contract in L1.
/// @param _router The address of `L2GatewayRouter` contract in L2.
/// @param _messenger The address of `L2ScrollMessenger` contract in L2.
function initialize(address _counterpart, address _router, address _messenger) external initializer {
function initialize(
address _counterpart,
address _router,
address _messenger
) external initializer {
ScrollGatewayBase._initialize(_counterpart, _router, _messenger);
}

View File

@@ -21,7 +21,7 @@ contract L2GatewayRouter is OwnableUpgradeable, IL2GatewayRouter {
/// @notice The address of L2ETHGateway.
address public ethGateway;
/// @notice The addess of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.
/// @notice The address of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.
address public defaultERC20Gateway;
/// @notice Mapping from L2 ERC20 token address to corresponding L2ERC20Gateway.

View File

@@ -242,7 +242,7 @@ contract L1BlockContainer is OwnableBase, IL1BlockContainer {
let _computedBlockHash := keccak256(memPtr, headerPayloadLength)
require(eq(_blockHash, _computedBlockHash), "Block hash mismatch")
// load 16 vaules
// load 16 values
for {
let i := 0
} lt(i, 16) {

View File

@@ -60,7 +60,7 @@ contract L1GasPriceOracle is OwnableBase, IL1GasPriceOracle {
/// commit_scalar = commit_gas_per_tx * fluctuation_multiplier * 1e9
/// ```
/// So, the value should not exceed 10^9 * 1e9 normally.
uint256 private constant MAX_COMMIT_SCALAR = 10 ** 9 * PRECISION;
uint256 private constant MAX_COMMIT_SCALAR = 10**9 * PRECISION;
/// @dev The maximum possible l1 blob fee scalar after Curie.
/// We derive the blob scalar by
@@ -68,7 +68,7 @@ contract L1GasPriceOracle is OwnableBase, IL1GasPriceOracle {
/// blob_scalar = fluctuation_multiplier / compression_ratio / blob_util_ratio * 1e9
/// ```
/// So, the value should not exceed 10^9 * 1e9 normally.
uint256 private constant MAX_BLOB_SCALAR = 10 ** 9 * PRECISION;
uint256 private constant MAX_BLOB_SCALAR = 10**9 * PRECISION;
/*************
* Variables *
@@ -113,6 +113,9 @@ contract L1GasPriceOracle is OwnableBase, IL1GasPriceOracle {
constructor(address _owner) {
_transferOwnership(_owner);
// by default we enable Curie from genesis
isCurie = true;
}
/*************************
@@ -150,10 +153,11 @@ contract L1GasPriceOracle is OwnableBase, IL1GasPriceOracle {
}
/// @inheritdoc IL1GasPriceOracle
function setL1BaseFeeAndBlobBaseFee(
uint256 _l1BaseFee,
uint256 _l1BlobBaseFee
) external override onlyWhitelistedSender {
function setL1BaseFeeAndBlobBaseFee(uint256 _l1BaseFee, uint256 _l1BlobBaseFee)
external
override
onlyWhitelistedSender
{
l1BaseFee = _l1BaseFee;
l1BlobBaseFee = _l1BlobBaseFee;

View File

@@ -169,7 +169,12 @@ contract L1BatchBridgeGateway is AccessControlEnumerableUpgradeable, ReentrancyG
/// @param _router The address of `L1GatewayRouter` contract in L1.
/// @param _messenger The address of `L1ScrollMessenger` contract in L1.
/// @param _queue The address of `L1MessageQueue` contract in L1.
constructor(address _counterpart, address _router, address _messenger, address _queue) {
constructor(
address _counterpart,
address _router,
address _messenger,
address _queue
) {
_disableInitializers();
counterpart = _counterpart;
@@ -335,7 +340,11 @@ contract L1BatchBridgeGateway is AccessControlEnumerableUpgradeable, ReentrancyG
/// @param token The address of token to deposit.
/// @param sender The address of token sender.
/// @param amount The amount of token to deposit.
function _deposit(address token, address sender, uint96 amount) internal {
function _deposit(
address token,
address sender,
uint96 amount
) internal {
BatchConfig memory cachedBatchConfig = configs[token];
TokenState memory cachedTokenState = tokens[token];
_tryFinalizeCurrentBatch(token, cachedBatchConfig, cachedTokenState);
@@ -400,7 +409,11 @@ contract L1BatchBridgeGateway is AccessControlEnumerableUpgradeable, ReentrancyG
/// @param token The address of token.
/// @param receiver The address of token receiver.
/// @param amount The amount of token to transfer.
function _transferToken(address token, address receiver, uint256 amount) private {
function _transferToken(
address token,
address receiver,
uint256 amount
) private {
if (token == address(0)) {
(bool success, ) = receiver.call{value: amount}("");
if (!success) revert ErrorTransferETHFailed();

View File

@@ -185,7 +185,11 @@ contract L2BatchBridgeGateway is AccessControlEnumerableUpgradeable {
/// @param l2Token The address of L2 token.
/// @param batchIndex The index of batch to distribute.
/// @param nodes The list of encoded L1 deposits.
function distribute(address l2Token, uint64 batchIndex, bytes32[] memory nodes) external onlyRole(KEEPER_ROLE) {
function distribute(
address l2Token,
uint64 batchIndex,
bytes32[] memory nodes
) external onlyRole(KEEPER_ROLE) {
address l1Token = tokenMapping[l2Token];
bytes32 hash = BatchBridgeCodec.encodeInitialNode(l1Token, batchIndex);
for (uint256 i = 0; i < nodes.length; i++) {
@@ -221,7 +225,11 @@ contract L2BatchBridgeGateway is AccessControlEnumerableUpgradeable {
/// @param receiver The address of token receiver.
/// @param amount The amount of token to transfer.
/// @return success Whether the transfer is successful.
function _transferToken(address token, address receiver, uint256 amount) private returns (bool success) {
function _transferToken(
address token,
address receiver,
uint256 amount
) private returns (bool success) {
if (token == address(0)) {
// We add a gas limit here to avoid DDoS from a malicious receiver.
(success, ) = receiver.call{value: amount, gas: SAFE_ETH_TRANSFER_GAS_LIMIT}("");

View File

@@ -23,7 +23,7 @@ contract ScrollStandardERC20Factory is Ownable, IScrollStandardERC20Factory {
/// @inheritdoc IScrollStandardERC20Factory
function computeL2TokenAddress(address _gateway, address _l1Token) external view returns (address) {
// In StandardERC20Gateway, all corresponding l2 tokens are depoyed by Create2 with salt,
// In StandardERC20Gateway, all corresponding l2 tokens are deployed by Create2 with salt,
// we can calculate the l2 address directly.
bytes32 _salt = _getSalt(_gateway, _l1Token);

View File

@@ -369,7 +369,7 @@ library PatriciaMerkleTrieVerifier {
// first item is considered the root node.
// Otherwise verifies that the hash of the current node
// is the same as the previous choosen one.
// is the same as the previous chosen one.
switch i
case 1 {
rootHash := hash
@@ -425,7 +425,7 @@ library PatriciaMerkleTrieVerifier {
}
}
// lastly, derive the path of the choosen one (TM)
// lastly, derive the path of the chosen one (TM)
path := derivePath(key, depth)
}

View File

@@ -113,7 +113,7 @@ library ZkTrieVerifier {
// first item is considered the root node.
// Otherwise verifies that the hash of the current node
// is the same as the previous choosen one.
// is the same as the previous chosen one.
switch depth
case 1 {
rootHash := hash
@@ -262,7 +262,7 @@ library ZkTrieVerifier {
ptr, storageValue := verifyStorageProof(poseidon, storageKey, storageRootHash, ptr)
// the one and only boundary check
// in case an attacker crafted a malicous payload
// in case an attacker crafted a malicious payload
// and succeeds in the prior verification steps
// then this should catch any bogus accesses
if iszero(eq(ptr, add(proof.offset, proof.length))) {

View File

@@ -0,0 +1,119 @@
// SPDX-License-Identifier: MIT
pragma solidity =0.8.24;
import {ScrollChain} from "../L1/rollup/ScrollChain.sol";
import {BatchHeaderV0Codec} from "../libraries/codec/BatchHeaderV0Codec.sol";
import {BatchHeaderV1Codec} from "../libraries/codec/BatchHeaderV1Codec.sol";
contract ScrollChainMockFinalize is ScrollChain {
/***************
* Constructor *
***************/
/// @notice Constructor for `ScrollChain` implementation contract.
///
/// @param _chainId The chain id of L2.
/// @param _messageQueue The address of `L1MessageQueue` contract.
/// @param _verifier The address of zkevm verifier contract.
constructor(
uint64 _chainId,
address _messageQueue,
address _verifier
) ScrollChain(_chainId, _messageQueue, _verifier) {}
/*****************************
* Public Mutating Functions *
*****************************/
function finalizeBatch(
bytes calldata _batchHeader,
bytes32 _prevStateRoot,
bytes32 _postStateRoot,
bytes32 _withdrawRoot
) external OnlyProver whenNotPaused {
require(_prevStateRoot != bytes32(0), "previous state root is zero");
require(_postStateRoot != bytes32(0), "new state root is zero");
// compute batch hash and verify
(uint256 memPtr, bytes32 _batchHash, uint256 _batchIndex, ) = _loadBatchHeader(_batchHeader);
// verify previous state root.
require(finalizedStateRoots[_batchIndex - 1] == _prevStateRoot, "incorrect previous state root");
// avoid duplicated verification
require(finalizedStateRoots[_batchIndex] == bytes32(0), "batch already verified");
// check and update lastFinalizedBatchIndex
unchecked {
require(lastFinalizedBatchIndex + 1 == _batchIndex, "incorrect batch index");
lastFinalizedBatchIndex = _batchIndex;
}
// record state root and withdraw root
finalizedStateRoots[_batchIndex] = _postStateRoot;
withdrawRoots[_batchIndex] = _withdrawRoot;
// Pop finalized and non-skipped message from L1MessageQueue.
_popL1Messages(
BatchHeaderV0Codec.getSkippedBitmapPtr(memPtr),
BatchHeaderV0Codec.getTotalL1MessagePopped(memPtr),
BatchHeaderV0Codec.getL1MessagePopped(memPtr)
);
emit FinalizeBatch(_batchIndex, _batchHash, _postStateRoot, _withdrawRoot);
}
function finalizeBatch4844(
bytes calldata _batchHeader,
bytes32 _prevStateRoot,
bytes32 _postStateRoot,
bytes32 _withdrawRoot,
bytes calldata _blobDataProof
) external OnlyProver whenNotPaused {
if (_prevStateRoot == bytes32(0)) revert ErrorPreviousStateRootIsZero();
if (_postStateRoot == bytes32(0)) revert ErrorStateRootIsZero();
// compute batch hash and verify
(uint256 memPtr, bytes32 _batchHash, uint256 _batchIndex, ) = _loadBatchHeader(_batchHeader);
bytes32 _blobVersionedHash = BatchHeaderV1Codec.getBlobVersionedHash(memPtr);
// Calls the point evaluation precompile and verifies the output
{
(bool success, bytes memory data) = POINT_EVALUATION_PRECOMPILE_ADDR.staticcall(
abi.encodePacked(_blobVersionedHash, _blobDataProof)
);
// We verify that the point evaluation precompile call was successful by testing the latter 32 bytes of the
// response is equal to BLS_MODULUS as defined in https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
if (!success) revert ErrorCallPointEvaluationPrecompileFailed();
(, uint256 result) = abi.decode(data, (uint256, uint256));
if (result != BLS_MODULUS) revert ErrorUnexpectedPointEvaluationPrecompileOutput();
}
// verify previous state root.
if (finalizedStateRoots[_batchIndex - 1] != _prevStateRoot) revert ErrorIncorrectPreviousStateRoot();
// avoid duplicated verification
if (finalizedStateRoots[_batchIndex] != bytes32(0)) revert ErrorBatchIsAlreadyVerified();
// check and update lastFinalizedBatchIndex
unchecked {
if (lastFinalizedBatchIndex + 1 != _batchIndex) revert ErrorIncorrectBatchIndex();
lastFinalizedBatchIndex = _batchIndex;
}
// record state root and withdraw root
finalizedStateRoots[_batchIndex] = _postStateRoot;
withdrawRoots[_batchIndex] = _withdrawRoot;
// Pop finalized and non-skipped message from L1MessageQueue.
_popL1Messages(
BatchHeaderV1Codec.getSkippedBitmapPtr(memPtr),
BatchHeaderV1Codec.getTotalL1MessagePopped(memPtr),
BatchHeaderV1Codec.getL1MessagePopped(memPtr)
);
emit FinalizeBatch(_batchIndex, _batchHash, _postStateRoot, _withdrawRoot);
}
}
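For reference, a minimal off-chain sketch in Go of the same output validation the mock performs on the point evaluation precompile response. The modulus is BLS_MODULUS from EIP-4844 (the precompile returns FIELD_ELEMENTS_PER_BLOB and BLS_MODULUS as two 32-byte words); the helper name and standalone packaging are illustrative only, not part of this change:

package main

import (
	"errors"
	"fmt"
	"math/big"
)

// BLS_MODULUS as defined in EIP-4844.
var blsModulus, _ = new(big.Int).SetString(
	"52435875175126190479447740508185965837690552500527637822603658699938581184513", 10)

// checkPointEvalOutput mirrors the contract's check: the precompile call
// must return 64 bytes whose second 32-byte word equals BLS_MODULUS.
func checkPointEvalOutput(data []byte) error {
	if len(data) != 64 {
		return errors.New("unexpected point evaluation output length")
	}
	if new(big.Int).SetBytes(data[32:]).Cmp(blsModulus) != 0 {
		return errors.New("unexpected point evaluation precompile output")
	}
	return nil
}

func main() {
	out := make([]byte, 64)
	blsModulus.FillBytes(out[32:])         // a well-formed success response
	fmt.Println(checkPointEvalOutput(out)) // <nil>
}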

View File

@@ -11,8 +11,8 @@ contract L1GasPriceOracleTest is DSTestPlus {
uint256 private constant PRECISION = 1e9;
uint256 private constant MAX_OVERHEAD = 30000000 / 16;
uint256 private constant MAX_SCALAR = 1000 * PRECISION;
uint256 private constant MAX_COMMIT_SCALAR = 10 ** 9 * PRECISION;
uint256 private constant MAX_BLOB_SCALAR = 10 ** 9 * PRECISION;
uint256 private constant MAX_COMMIT_SCALAR = 10**9 * PRECISION;
uint256 private constant MAX_BLOB_SCALAR = 10**9 * PRECISION;
L1GasPriceOracle private oracle;
Whitelist private whitelist;

View File

@@ -173,7 +173,11 @@ contract L1BatchBridgeGatewayTest is L1GatewayTestBase {
assertEq(safeBridgeGasLimit, config.safeBridgeGasLimit);
}
function checkBatchState(address token, uint256 phase, L1BatchBridgeGateway.BatchState memory expected) private {
function checkBatchState(
address token,
uint256 phase,
L1BatchBridgeGateway.BatchState memory expected
) private {
(uint128 amount, uint64 startTime, uint64 numDeposits, bytes32 hash) = batch.batches(token, phase);
assertEq(amount, expected.amount);
assertEq(startTime, expected.startTime);

View File

@@ -10,7 +10,11 @@ contract RevertOnTransferToken is MockERC20 {
bool private revertOnTransfer;
bool private transferReturn;
constructor(string memory _name, string memory _symbol, uint8 _decimals) MockERC20(_name, _symbol, _decimals) {
constructor(
string memory _name,
string memory _symbol,
uint8 _decimals
) MockERC20(_name, _symbol, _decimals) {
transferReturn = true;
}

View File

@@ -242,21 +242,28 @@ func (bp *BatchProverTask) assignWithTwoCircuits(ctx *gin.Context, taskCtx *prov
chunkRanges [2]*chunkIndexRange
err error
)
var chunkRange *chunkIndexRange
for i := 0; i < 2; i++ {
hardForkNames[i] = bp.reverseVkMap[getTaskParameter.VKs[i]]
chunkRanges[i], err = bp.getChunkRangeByName(ctx, hardForkNames[i])
if err != nil {
return nil, err
}
if chunkRanges[i] == nil {
return nil, nil
if chunkRanges[i] != nil {
if chunkRange == nil {
chunkRange = chunkRanges[i]
} else {
chunkRange = chunkRange.merge(*chunkRanges[i])
}
}
}
chunkRange := chunkRanges[0].merge(*chunkRanges[1])
if chunkRange == nil {
return nil, nil
}
var hardForkName string
getHardForkName := func(batch *orm.Batch) (string, error) {
for i := 0; i < 2; i++ {
if chunkRanges[i].contains(batch.StartChunkIndex, batch.EndChunkIndex) {
if chunkRanges[i] != nil && chunkRanges[i].contains(batch.StartChunkIndex, batch.EndChunkIndex) {
hardForkName = hardForkNames[i]
break
}
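The behavioral change in this hunk: a nil chunk range for one circuit no longer aborts task assignment with an early return; non-nil ranges are merged instead, and only a fully nil result yields no task. A minimal runnable sketch of that logic in Go, with the chunkIndexRange fields and the merge/contains semantics assumed from their call sites above:

package main

import "fmt"

// Assumed shape of the coordinator's chunkIndexRange.
type chunkIndexRange struct {
	start, end uint64
}

// merge returns the smallest range covering both inputs.
func (r *chunkIndexRange) merge(other chunkIndexRange) *chunkIndexRange {
	start, end := r.start, r.end
	if other.start < start {
		start = other.start
	}
	if other.end > end {
		end = other.end
	}
	return &chunkIndexRange{start, end}
}

// contains reports whether [start, end] lies inside the range.
func (r *chunkIndexRange) contains(start, end uint64) bool {
	return start >= r.start && end <= r.end
}

func main() {
	// One circuit has no pending chunks (nil); before the fix this caused
	// an early "return nil, nil" even though the other circuit had work.
	// After the fix the nil entry is simply skipped.
	chunkRanges := []*chunkIndexRange{nil, {start: 10, end: 20}}
	var chunkRange *chunkIndexRange
	for _, cr := range chunkRanges {
		if cr == nil {
			continue
		}
		if chunkRange == nil {
			chunkRange = cr
		} else {
			chunkRange = chunkRange.merge(*cr)
		}
	}
	fmt.Println(chunkRange, chunkRange.contains(12, 18)) // &{10 20} true
}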

View File

@@ -309,7 +309,7 @@ func (o *Batch) UpdateProvingStatusFailed(ctx context.Context, hash string, maxA
db = db.Model(&Batch{})
db = db.Where("hash", hash)
db = db.Where("total_attempts >= ?", maxAttempts)
db = db.Where("proving_status != ?", int(types.ProverProofValid))
db = db.Where("proving_status != ?", int(types.ProvingTaskVerified))
if err := db.Update("proving_status", int(types.ProvingTaskFailed)).Error; err != nil {
return fmt.Errorf("Batch.UpdateProvingStatus error: %w, batch hash: %v, status: %v", err, hash, types.ProvingTaskFailed.String())
}

View File

@@ -332,7 +332,7 @@ func (o *Chunk) UpdateProvingStatusFailed(ctx context.Context, hash string, maxA
db = db.Model(&Chunk{})
db = db.Where("hash", hash)
db = db.Where("total_attempts >= ?", maxAttempts)
db = db.Where("proving_status != ?", int(types.ProverProofValid))
db = db.Where("proving_status != ?", int(types.ProvingTaskVerified))
if err := db.Update("proving_status", int(types.ProvingTaskFailed)).Error; err != nil {
return fmt.Errorf("Batch.UpdateProvingStatus error: %w, batch hash: %v, status: %v", err, hash, types.ProvingTaskFailed.String())
}
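Both hunks above fix the same guard: the proving_status column stores types.ProvingStatus values, so comparing it against types.ProverProofValid (a separate prover-side enum) never actually excluded verified rows. A hedged Go sketch of the corrected condition; the concrete constant values are assumptions, only the status names come from the diff:

package main

import "fmt"

// ProvingStatus mimics types.ProvingStatus; values here are assumed.
type ProvingStatus int

const (
	ProvingTaskUnassigned ProvingStatus = iota + 1
	ProvingTaskAssigned
	ProvingTaskVerified
	ProvingTaskFailed
)

// mayMarkFailed reproduces the corrected WHERE clause: a row is marked
// failed only after exhausting its attempts and only if it has not
// already been verified.
func mayMarkFailed(status ProvingStatus, totalAttempts, maxAttempts int) bool {
	return totalAttempts >= maxAttempts && status != ProvingTaskVerified
}

func main() {
	fmt.Println(mayMarkFailed(ProvingTaskVerified, 5, 5)) // false: verified rows are kept
	fmt.Println(mayMarkFailed(ProvingTaskAssigned, 5, 5)) // true: safe to mark failed
}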

prover/Cargo.lock (generated)
View File

@@ -59,12 +59,12 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor",
"ctor 0.1.26",
"encoder",
"env_logger 0.10.2",
"eth-types 0.11.0",
@@ -661,7 +661,7 @@ dependencies = [
[[package]]
name = "bus-mapping"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1036,6 +1036,16 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "ctor"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f"
dependencies = [
"quote",
"syn 2.0.66",
]
[[package]]
name = "ctr"
version = "0.9.2"
@@ -1381,7 +1391,7 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"base64 0.13.1",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1615,7 +1625,7 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"geth-utils 0.11.0",
@@ -1853,7 +1863,7 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"halo2_proofs",
@@ -1886,7 +1896,7 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"env_logger 0.10.2",
"gobuild",
@@ -2785,7 +2795,7 @@ dependencies = [
[[package]]
name = "mock"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -2815,7 +2825,7 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"halo2curves",
@@ -3378,6 +3388,7 @@ dependencies = [
"anyhow",
"base64 0.13.1",
"clap",
"ctor 0.2.8",
"env_logger 0.11.3",
"eth-keystore",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -3440,7 +3451,7 @@ dependencies = [
[[package]]
name = "prover"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"aggregator 0.11.0",
"anyhow",
@@ -5584,7 +5595,7 @@ dependencies = [
[[package]]
name = "zkevm-circuits"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"array-init",
"bus-mapping 0.11.0",

View File

@@ -30,7 +30,7 @@ ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branc
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "v0.10", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
prover_next = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_next = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3"
@@ -45,3 +45,4 @@ tokio = "1.37.0"
sled = "0.34.7"
http = "1.1.0"
clap = { version = "4.5", features = ["derive"] }
ctor = "0.2.8"

View File

@@ -1,4 +1,4 @@
.PHONY: prover
.PHONY: prover lint tests_binary
ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ./Cargo.lock | cut -d "#" -f2 | cut -c-7)
@@ -38,4 +38,15 @@ endif
prover:
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --release
rm -rf ./lib && mkdir ./lib
find target/ -name "libzktrie.so" | xargs -I{} cp {} ./lib
tests_binary:
cargo clean && cargo test --release --no-run
ls target/release/deps/prover* | grep -v "\.d" | xargs -I{} ln -sf {} ./prover.test
rm -rf ./lib && mkdir ./lib
find target/ -name "libzktrie.so" | xargs -I{} cp {} ./lib
lint:
cargo check --all-features
cargo clippy --all-features --all-targets -- -D warnings
cargo fmt --all

View File

@@ -1,3 +1,5 @@
#![feature(lazy_cell)]
mod config;
mod coordinator_client;
mod geth_client;

View File

@@ -6,8 +6,8 @@ use serde::Deserialize;
use crate::types::{CommonHash, Task};
use prover::{
aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver, BlockTrace, ChunkHash,
ChunkProof,
aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver, BatchProof, BlockTrace,
ChunkHash, ChunkProof,
};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
@@ -60,8 +60,7 @@ impl BaseCircuitsHandler {
}
}
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_trace = self.gen_chunk_traces(task)?;
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
if let Some(prover) = self.chunk_prover.as_ref() {
let chunk_proof = prover.borrow_mut().gen_chunk_proof(
chunk_trace,
@@ -70,34 +69,50 @@ impl BaseCircuitsHandler {
self.get_output_dir(),
)?;
return serde_json::to_string(&chunk_proof).map_err(|e| anyhow::anyhow!(e));
return Ok(chunk_proof);
}
unreachable!("please check errors in proof_type logic")
}
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_trace = self.gen_chunk_traces(task)?;
let chunk_proof = self.gen_chunk_proof_raw(chunk_trace)?;
Ok(serde_json::to_string(&chunk_proof)?)
}
fn gen_batch_proof_raw(
&self,
chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)>,
) -> Result<BatchProof> {
if let Some(prover) = self.batch_prover.as_ref() {
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
let is_valid = prover.borrow_mut().check_chunk_proofs(&chunk_proofs);
if !is_valid {
bail!("non-match chunk protocol, task-id: {}", &task.id)
bail!("non-match chunk protocol")
}
let batch_proof = prover.borrow_mut().gen_agg_evm_proof(
chunk_hashes_proofs,
None,
self.get_output_dir(),
)?;
return serde_json::to_string(&batch_proof).map_err(|e| anyhow::anyhow!(e));
return Ok(batch_proof);
}
unreachable!("please check errors in proof_type logic")
}
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_batch_proof for task {}", task.id);
let chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let batch_proof = self.gen_batch_proof_raw(chunk_hashes_proofs)?;
Ok(serde_json::to_string(&batch_proof)?)
}
fn get_output_dir(&self) -> Option<&str> {
OUTPUT_DIR.as_deref()
}
@@ -195,3 +210,178 @@ impl CircuitsHandler for BaseCircuitsHandler {
}
}
}
// =================================== tests module ========================================
#[cfg(test)]
mod tests {
use super::*;
use crate::zk_circuits_handler::utils::encode_vk;
use prover::utils::chunk_trace_to_witness_block;
use std::{path::PathBuf, sync::LazyLock};
#[ctor::ctor]
fn init() {
crate::utils::log_init(None);
log::info!("logger initialized");
}
static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("BERNOULLI_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/')
.to_string()
});
static PARAMS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_params", *WORK_DIR));
static ASSETS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_assets", *WORK_DIR));
static PROOF_DUMP_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/proof_data", *WORK_DIR));
static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
#[test]
fn it_works() {
let result = true;
assert!(result);
}
#[test]
fn test_circuits() -> Result<()> {
let chunk_handler =
BaseCircuitsHandler::new(ProofType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let chunk_vk = chunk_handler.get_vk(ProofType::Chunk).unwrap();
check_vk(ProofType::Chunk, chunk_vk, "chunk vk must be available");
let chunk_dir_paths = get_chunk_dir_paths()?;
log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
let mut chunk_infos = vec![];
let mut chunk_proofs = vec![];
for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
let chunk_id = format!("chunk_proof{}", id + 1);
log::info!("start to process {chunk_id}");
let chunk_trace = read_chunk_trace(chunk_path)?;
let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
chunk_infos.push(chunk_info);
log::info!("start to prove {chunk_id}");
let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace)?;
let proof_data = serde_json::to_string(&chunk_proof)?;
dump_proof(chunk_id, proof_data)?;
chunk_proofs.push(chunk_proof);
}
let batch_handler =
BaseCircuitsHandler::new(ProofType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_vk = batch_handler.get_vk(ProofType::Batch).unwrap();
check_vk(ProofType::Batch, batch_vk, "batch vk must be available");
let chunk_hashes_proofs = chunk_infos.into_iter().zip(chunk_proofs).collect();
log::info!("start to prove batch");
let batch_proof = batch_handler.gen_batch_proof_raw(chunk_hashes_proofs)?;
let proof_data = serde_json::to_string(&batch_proof)?;
dump_proof("batch_proof".to_string(), proof_data)?;
Ok(())
}
fn check_vk(proof_type: ProofType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", proof_type);
let vk_from_file = read_vk(proof_type).unwrap();
assert_eq!(vk_from_file, encode_vk(vk), "{info}")
}
fn read_vk(proof_type: ProofType) -> Result<String> {
log::info!("read_vk, {:?}", proof_type);
let vk_file = match proof_type {
ProofType::Chunk => CHUNK_VK_PATH.clone(),
ProofType::Batch => BATCH_VK_PATH.clone(),
ProofType::Undefined => unreachable!(),
};
let data = std::fs::read(vk_file)?;
Ok(encode_vk(data))
}
fn read_chunk_trace(path: PathBuf) -> Result<Vec<BlockTrace>> {
log::info!("read_chunk_trace, {:?}", path);
let mut chunk_trace: Vec<BlockTrace> = vec![];
fn read_block_trace(file: &PathBuf) -> Result<BlockTrace> {
let f = std::fs::File::open(file)?;
Ok(serde_json::from_reader(&f)?)
}
if path.is_dir() {
let entries = std::fs::read_dir(&path)?;
let mut files: Vec<String> = entries
.into_iter()
.filter_map(|e| {
if e.is_err() {
return None;
}
let entry = e.unwrap();
if entry.path().is_dir() {
return None;
}
if let Result::Ok(file_name) = entry.file_name().into_string() {
Some(file_name)
} else {
None
}
})
.collect();
files.sort();
log::info!("files in chunk {:?} is {:?}", path, files);
for file in files {
let block_trace = read_block_trace(&path.join(file))?;
chunk_trace.push(block_trace);
}
} else {
let block_trace = read_block_trace(&path)?;
chunk_trace.push(block_trace);
}
Ok(chunk_trace)
}
fn get_chunk_dir_paths() -> Result<Vec<PathBuf>> {
let batch_path = PathBuf::from(BATCH_DIR_PATH.clone());
let entries = std::fs::read_dir(&batch_path)?;
let mut files: Vec<String> = entries
.filter_map(|e| {
if e.is_err() {
return None;
}
let entry = e.unwrap();
if entry.path().is_dir() {
if let Result::Ok(file_name) = entry.file_name().into_string() {
Some(file_name)
} else {
None
}
} else {
None
}
})
.collect();
files.sort();
log::info!("files in batch {:?} is {:?}", batch_path, files);
Ok(files.into_iter().map(|f| batch_path.join(f)).collect())
}
fn traces_to_chunk_info(chunk_trace: Vec<BlockTrace>) -> Result<ChunkHash> {
let witness_block = chunk_trace_to_witness_block(chunk_trace)?;
Ok(ChunkHash::from_witness_block(&witness_block, false))
}
fn dump_proof(id: String, proof_data: String) -> Result<()> {
let dump_path = PathBuf::from(PROOF_DUMP_PATH.clone());
Ok(std::fs::write(dump_path.join(id), proof_data)?)
}
}

View File

@@ -8,7 +8,7 @@ use std::{cell::RefCell, cmp::Ordering, rc::Rc};
use prover_next::{
aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver,
BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask,
BatchProof, BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask,
};
use super::bernoulli::OUTPUT_DIR;
@@ -59,8 +59,7 @@ impl NextCircuitsHandler {
}
}
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_trace = self.gen_chunk_traces(task)?;
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
if let Some(prover) = self.chunk_prover.as_ref() {
let chunk = ChunkProvingTask::from(chunk_trace);
@@ -69,22 +68,29 @@ impl NextCircuitsHandler {
.borrow_mut()
.gen_chunk_proof(chunk, None, None, self.get_output_dir())?;
return serde_json::to_string(&chunk_proof).map_err(|e| anyhow::anyhow!(e));
return Ok(chunk_proof);
}
unreachable!("please check errors in proof_type logic")
}
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_trace = self.gen_chunk_traces(task)?;
let chunk_proof = self.gen_chunk_proof_raw(chunk_trace)?;
Ok(serde_json::to_string(&chunk_proof)?)
}
fn gen_batch_proof_raw(
&self,
chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)>,
) -> Result<BatchProof> {
if let Some(prover) = self.batch_prover.as_ref() {
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
let is_valid = prover.borrow_mut().check_protocol_of_chunks(&chunk_proofs);
if !is_valid {
bail!("non-match chunk protocol, task-id: {}", &task.id)
bail!("non-match chunk protocol")
}
check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
let batch = BatchProvingTask { chunk_proofs };
@@ -93,11 +99,19 @@ impl NextCircuitsHandler {
.borrow_mut()
.gen_agg_evm_proof(batch, None, self.get_output_dir())?;
return serde_json::to_string(&batch_proof).map_err(|e| anyhow::anyhow!(e));
return Ok(batch_proof);
}
unreachable!("please check errors in proof_type logic")
}
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_batch_proof for task {}", task.id);
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let batch_proof = self.gen_batch_proof_raw(chunk_hashes_proofs)?;
Ok(serde_json::to_string(&batch_proof)?)
}
fn get_output_dir(&self) -> Option<&str> {
OUTPUT_DIR.as_deref()
}
@@ -195,3 +209,178 @@ impl CircuitsHandler for NextCircuitsHandler {
}
}
}
// =================================== tests module ========================================
#[cfg(test)]
mod tests {
use super::*;
use crate::zk_circuits_handler::utils::encode_vk;
use prover_next::utils::chunk_trace_to_witness_block;
use std::{path::PathBuf, sync::LazyLock};
#[ctor::ctor]
fn init() {
crate::utils::log_init(None);
log::info!("logger initialized");
}
static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("CURIE_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/')
.to_string()
});
static PARAMS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_params", *WORK_DIR));
static ASSETS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_assets", *WORK_DIR));
static PROOF_DUMP_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/proof_data", *WORK_DIR));
static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
#[test]
fn it_works() {
let result = true;
assert!(result);
}
#[test]
fn test_circuits() -> Result<()> {
let chunk_handler =
NextCircuitsHandler::new(ProofType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let chunk_vk = chunk_handler.get_vk(ProofType::Chunk).unwrap();
check_vk(ProofType::Chunk, chunk_vk, "chunk vk must be available");
let chunk_dir_paths = get_chunk_dir_paths()?;
log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
let mut chunk_infos = vec![];
let mut chunk_proofs = vec![];
for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
let chunk_id = format!("chunk_proof{}", id + 1);
log::info!("start to process {chunk_id}");
let chunk_trace = read_chunk_trace(chunk_path)?;
let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
chunk_infos.push(chunk_info);
log::info!("start to prove {chunk_id}");
let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace)?;
let proof_data = serde_json::to_string(&chunk_proof)?;
dump_proof(chunk_id, proof_data)?;
chunk_proofs.push(chunk_proof);
}
let batch_handler =
NextCircuitsHandler::new(ProofType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_vk = batch_handler.get_vk(ProofType::Batch).unwrap();
check_vk(ProofType::Batch, batch_vk, "batch vk must be available");
let chunk_hashes_proofs = chunk_infos.into_iter().zip(chunk_proofs).collect();
log::info!("start to prove batch");
let batch_proof = batch_handler.gen_batch_proof_raw(chunk_hashes_proofs)?;
let proof_data = serde_json::to_string(&batch_proof)?;
dump_proof("batch_proof".to_string(), proof_data)?;
Ok(())
}
fn check_vk(proof_type: ProofType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", proof_type);
let vk_from_file = read_vk(proof_type).unwrap();
assert_eq!(vk_from_file, encode_vk(vk), "{info}")
}
fn read_vk(proof_type: ProofType) -> Result<String> {
log::info!("read_vk, {:?}", proof_type);
let vk_file = match proof_type {
ProofType::Chunk => CHUNK_VK_PATH.clone(),
ProofType::Batch => BATCH_VK_PATH.clone(),
ProofType::Undefined => unreachable!(),
};
let data = std::fs::read(vk_file)?;
Ok(encode_vk(data))
}
fn read_chunk_trace(path: PathBuf) -> Result<Vec<BlockTrace>> {
log::info!("read_chunk_trace, {:?}", path);
let mut chunk_trace: Vec<BlockTrace> = vec![];
fn read_block_trace(file: &PathBuf) -> Result<BlockTrace> {
let f = std::fs::File::open(file)?;
Ok(serde_json::from_reader(&f)?)
}
if path.is_dir() {
let entries = std::fs::read_dir(&path)?;
let mut files: Vec<String> = entries
.into_iter()
.filter_map(|e| {
if e.is_err() {
return None;
}
let entry = e.unwrap();
if entry.path().is_dir() {
return None;
}
if let Result::Ok(file_name) = entry.file_name().into_string() {
Some(file_name)
} else {
None
}
})
.collect();
files.sort();
log::info!("files in chunk {:?} is {:?}", path, files);
for file in files {
let block_trace = read_block_trace(&path.join(file))?;
chunk_trace.push(block_trace);
}
} else {
let block_trace = read_block_trace(&path)?;
chunk_trace.push(block_trace);
}
Ok(chunk_trace)
}
fn get_chunk_dir_paths() -> Result<Vec<PathBuf>> {
let batch_path = PathBuf::from(BATCH_DIR_PATH.clone());
let entries = std::fs::read_dir(&batch_path)?;
let mut files: Vec<String> = entries
.filter_map(|e| {
if e.is_err() {
return None;
}
let entry = e.unwrap();
if entry.path().is_dir() {
if let Result::Ok(file_name) = entry.file_name().into_string() {
Some(file_name)
} else {
None
}
} else {
None
}
})
.collect();
files.sort();
log::info!("files in batch {:?} is {:?}", batch_path, files);
Ok(files.into_iter().map(|f| batch_path.join(f)).collect())
}
fn traces_to_chunk_info(chunk_trace: Vec<BlockTrace>) -> Result<ChunkInfo> {
let witness_block = chunk_trace_to_witness_block(chunk_trace)?;
Ok(ChunkInfo::from_witness_block(&witness_block, false))
}
fn dump_proof(id: String, proof_data: String) -> Result<()> {
let dump_path = PathBuf::from(PROOF_DUMP_PATH.clone());
Ok(std::fs::write(dump_path.join(id), proof_data)?)
}
}

View File

@@ -34,12 +34,7 @@ import (
rutils "scroll-tech/rollup/internal/utils"
)
// Layer2Relayer is responsible for
// 1. Committing and finalizing L2 blocks on L1
// 2. Relaying messages from L2 to L1
//
// Actions are triggered by new head from layer 1 geth node.
// @todo It's better to be triggered by watcher.
// Layer2Relayer is responsible for committing and finalizing L2 blocks on L1.
type Layer2Relayer struct {
ctx context.Context
@@ -602,7 +597,7 @@ func (r *Layer2Relayer) finalizeBatch(dbBatch *orm.Batch, withProof bool) error
log.Info("finalizeBatch in layer1", "with proof", withProof, "index", dbBatch.Index, "batch hash", dbBatch.Hash, "tx hash", txHash.String())
// record and sync with db, @todo handle db error
// Updating rollup status in database.
if err := r.batchOrm.UpdateFinalizeTxHashAndRollupStatus(r.ctx, dbBatch.Hash, txHash.String(), types.RollupFinalizing); err != nil {
log.Error("UpdateFinalizeTxHashAndRollupStatus failed", "index", dbBatch.Index, "batch hash", dbBatch.Hash, "tx hash", txHash.String(), "err", err)
return err