Compare commits

..

49 Commits

Author SHA1 Message Date
colinlyguo
edffc64bcc fix 2023-12-17 18:16:12 +08:00
colinlyguo
acd7845383 fix 2023-12-17 14:39:31 +08:00
colinlyguo
5e2fe452d2 change start height 2023-12-16 23:52:43 +08:00
colinlyguo
a5ca547961 bug fixes 2023-12-16 23:33:34 +08:00
georgehao
8d034de87e feat: tweak some code (#1052)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2023-12-15 18:22:23 +08:00
colinlyguo
9484c1c85f Merge branch 'develop' into fix-bridge-history-api-write-db 2023-12-14 22:43:42 +08:00
georgehao
7011c3ee2e feat: update some layer logic (#1050)
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
2023-12-14 22:11:17 +08:00
colinlyguo
2f7e329aec fix duplicated update reverted txs 2023-12-14 09:05:19 +08:00
colinlyguo
33ec1e1dde add withdraw root check 2023-12-14 00:22:17 +08:00
colinlyguo
5c1de26ccc split relayed failed to failed relayed and relayed tx reverted 2023-12-13 23:47:16 +08:00
colinlyguo
bb1a7f6921 add 0x prefix in merkle proof 2023-12-13 23:17:38 +08:00
colinlyguo
fef9f83788 go mod tidy 2023-12-13 17:44:26 +08:00
colinlyguo
1075d7d671 chore: auto version bump [bot] 2023-12-13 09:42:52 +00:00
colinlyguo
420c96d15a go mod tidy 2023-12-13 17:42:27 +08:00
colinlyguo
3f9e50052e Merge branch 'develop' into fix-bridge-history-api-write-db 2023-12-13 17:41:18 +08:00
colinlyguo
04006b3ead update message_type in relayed messages 2023-12-13 17:30:00 +08:00
colinlyguo
7fe751f7c5 fix ERROR: ON CONFLICT DO UPDATE command cannot affect row a second time (SQLSTATE 21000) error 2023-12-13 17:12:21 +08:00
colinlyguo
0da1355c4d tweak Makefile 2023-12-13 16:43:38 +08:00
colinlyguo
9b5a7f7b62 fix go mod tidy 2023-12-13 15:53:24 +08:00
colinlyguo
b534aed080 Merge branch 'develop' into fix-bridge-history-api-write-db 2023-12-13 15:15:13 +08:00
colinlyguo
1d358fa4ed tweak logs 2023-12-13 15:06:18 +08:00
colinlyguo
ebf7a46242 remove useless logs 2023-12-13 13:35:47 +08:00
colinlyguo
cd883a36c2 add batch sync height in db 2023-12-13 05:15:53 +08:00
colin
f11ce33687 perf(bridge-history-api): add db indices and concurrent blocks fetching (#1043) 2023-12-13 04:46:08 +08:00
colinlyguo
e00d5a9548 fix bugs 2023-12-12 16:07:58 +08:00
colinlyguo
faec5a5e35 trigger ci 2023-12-11 20:10:07 +08:00
colinlyguo
3a090eecdd chore: auto version bump [bot] 2023-12-11 09:39:06 +00:00
colinlyguo
bee9489c42 Merge branch 'develop' into fix-bridge-history-api-write-db 2023-12-11 17:38:32 +08:00
colinlyguo
c4c68ef12d chore: auto version bump [bot] 2023-12-11 07:16:40 +00:00
colin
7f5c823a58 Merge branch 'develop' into fix-bridge-history-api-write-db 2023-12-11 15:15:47 +08:00
colin
367636843f fix: avoid duplicated batch updates (#1029)
Co-authored-by: georgehao <haohongfan@gmail.com>
2023-12-11 14:32:13 +08:00
colin
2f1abc06f6 address comments of new bridge history api (#1027) 2023-12-08 11:16:00 +08:00
colinlyguo
cc5fd778c7 tweak 2023-12-07 23:51:50 +08:00
colin
8587116ff8 Update bridge-history-api/internal/config/config.go
Co-authored-by: georgehao <haohongfan@gmail.com>
2023-12-07 23:49:56 +08:00
colinlyguo
a75813e6c1 add PageSize limit 2023-12-07 22:58:29 +08:00
colinlyguo
01d35d7449 add fetching reverted relayed messages and fixes 2023-12-07 17:59:08 +08:00
colinlyguo
59c8470c04 fix batch index update 2023-12-07 16:41:17 +08:00
colinlyguo
5622f8f627 tweak 2023-12-07 14:10:08 +08:00
colinlyguo
5936cd61d6 fixes 2023-12-07 02:43:01 +08:00
colin
f031c38890 Apply suggestions from code review
Co-authored-by: georgehao <haohongfan@gmail.com>
2023-12-06 23:48:56 +08:00
colinlyguo
9bfefa0474 update l1 start block height to first queue event 2023-12-06 18:44:23 +08:00
colinlyguo
a6c72a2ce2 fix 2023-12-06 18:03:25 +08:00
colinlyguo
299f5e46fe change Makefile 2023-12-06 17:14:24 +08:00
colinlyguo
7629f06243 change config default value 2023-12-06 17:12:15 +08:00
colinlyguo
5902bee340 fix 2023-12-06 16:58:53 +08:00
colinlyguo
e79900e24c refactor 2023-12-06 16:55:17 +08:00
colinlyguo
0803dd97fb refactor 2023-12-06 14:33:54 +08:00
colinlyguo
bdbddc38f5 update go mod 2023-12-06 13:39:44 +08:00
colinlyguo
f011fd5ac6 feat: new bridge-history apis 2023-12-06 12:31:28 +08:00
206 changed files with 3051 additions and 11713 deletions

View File

@@ -5,314 +5,151 @@ on:
tags:
- v**
env:
AWS_REGION: us-west-2
jobs:
event_watcher:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: check repo and create it if not exist
env:
REPOSITORY: event-watcher
run: |
aws --region ${{ env.AWS_REGION }} ecr describe-repositories --repository-names ${{ env.REPOSITORY }} && : || aws --region ${{ env.AWS_REGION }} ecr create-repository --repository-name ${{ env.REPOSITORY }}
- name: Build and push
uses: docker/build-push-action@v3
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
REPOSITORY: event-watcher
IMAGE_TAG: ${{ github.ref_name }}
with:
context: .
file: ./build/dockerfiles/event_watcher.Dockerfile
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push event_watcher docker
uses: docker/build-push-action@v2
with:
context: .
file: ./build/dockerfiles/event_watcher.Dockerfile
push: true
tags: scrolltech/event-watcher:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
gas_oracle:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: check repo and create it if not exist
env:
REPOSITORY: gas-oracle
run: |
aws --region ${{ env.AWS_REGION }} ecr describe-repositories --repository-names ${{ env.REPOSITORY }} && : || aws --region ${{ env.AWS_REGION }} ecr create-repository --repository-name ${{ env.REPOSITORY }}
- name: Build and push
uses: docker/build-push-action@v3
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
REPOSITORY: gas-oracle
IMAGE_TAG: ${{ github.ref_name }}
with:
context: .
file: ./build/dockerfiles/gas_oracle.Dockerfile
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push gas_oracle docker
uses: docker/build-push-action@v2
with:
context: .
file: ./build/dockerfiles/gas_oracle.Dockerfile
push: true
tags: scrolltech/gas-oracle:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
rollup_relayer:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: check repo and create it if not exist
env:
REPOSITORY: rollup-relayer
run: |
aws --region ${{ env.AWS_REGION }} ecr describe-repositories --repository-names ${{ env.REPOSITORY }} && : || aws --region ${{ env.AWS_REGION }} ecr create-repository --repository-name ${{ env.REPOSITORY }}
- name: Build and push
uses: docker/build-push-action@v3
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
REPOSITORY: rollup-relayer
IMAGE_TAG: ${{ github.ref_name }}
with:
context: .
file: ./build/dockerfiles/rollup_relayer.Dockerfile
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push rollup_relayer docker
uses: docker/build-push-action@v2
with:
context: .
file: ./build/dockerfiles/rollup_relayer.Dockerfile
push: true
tags: scrolltech/rollup-relayer:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
bridgehistoryapi-fetcher:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: check repo and create it if not exist
env:
REPOSITORY: bridgehistoryapi-fetcher
run: |
aws --region ${{ env.AWS_REGION }} ecr describe-repositories --repository-names ${{ env.REPOSITORY }} && : || aws --region ${{ env.AWS_REGION }} ecr create-repository --repository-name ${{ env.REPOSITORY }}
- name: Build and push
uses: docker/build-push-action@v3
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
REPOSITORY: bridgehistoryapi-fetcher
IMAGE_TAG: ${{ github.ref_name }}
with:
context: .
file: ./build/dockerfiles/bridgehistoryapi-fetcher.Dockerfile
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push bridgehistoryapi-fetcher docker
uses: docker/build-push-action@v2
with:
context: .
file: ./build/dockerfiles/bridgehistoryapi-fetcher.Dockerfile
push: true
tags: scrolltech/bridgehistoryapi-fetcher:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
bridgehistoryapi-api:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: check repo and create it if not exist
env:
REPOSITORY: bridgehistoryapi-api
run: |
aws --region ${{ env.AWS_REGION }} ecr describe-repositories --repository-names ${{ env.REPOSITORY }} && : || aws --region ${{ env.AWS_REGION }} ecr create-repository --repository-name ${{ env.REPOSITORY }}
- name: Build and push
uses: docker/build-push-action@v3
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
REPOSITORY: bridgehistoryapi-api
IMAGE_TAG: ${{ github.ref_name }}
with:
context: .
file: ./build/dockerfiles/bridgehistoryapi-api.Dockerfile
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push bridgehistoryapi-api docker
uses: docker/build-push-action@v2
with:
context: .
file: ./build/dockerfiles/bridgehistoryapi-api.Dockerfile
push: true
tags: scrolltech/bridgehistoryapi-api:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
coordinator-api:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: check repo and create it if not exist
env:
REPOSITORY: coordinator-api
run: |
aws --region ${{ env.AWS_REGION }} ecr describe-repositories --repository-names ${{ env.REPOSITORY }} && : || aws --region ${{ env.AWS_REGION }} ecr create-repository --repository-name ${{ env.REPOSITORY }}
- name: Build and push
uses: docker/build-push-action@v3
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
REPOSITORY: coordinator-api
IMAGE_TAG: ${{ github.ref_name }}
with:
context: .
file: ./build/dockerfiles/coordinator-api.Dockerfile
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push coordinator docker
uses: docker/build-push-action@v2
with:
context: .
file: ./build/dockerfiles/coordinator-api.Dockerfile
push: true
tags: scrolltech/coordinator-api:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
coordinator-cron:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: check repo and create it if not exist
env:
REPOSITORY: coordinator-cron
run: |
aws --region ${{ env.AWS_REGION }} ecr describe-repositories --repository-names ${{ env.REPOSITORY }} && : || aws --region ${{ env.AWS_REGION }} ecr create-repository --repository-name ${{ env.REPOSITORY }}
- name: Build and push
uses: docker/build-push-action@v3
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
REPOSITORY: coordinator-cron
IMAGE_TAG: ${{ github.ref_name }}
with:
context: .
file: ./build/dockerfiles/coordinator-cron.Dockerfile
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push coordinator docker
uses: docker/build-push-action@v2
with:
context: .
file: ./build/dockerfiles/coordinator-cron.Dockerfile
push: true
tags: scrolltech/coordinator-cron:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}

View File

@@ -33,7 +33,7 @@ Examples of unacceptable behavior include:
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct that could reasonably be considered inappropriate in a
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2022-2024 Scroll
Copyright (c) 2022-2023 Scroll
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -13,7 +13,7 @@
## Directory Structure
<pre>
├── <a href="./bridge-history-api/">bridge-history-api</a>: Bridge history service that collects deposit and withdraw events from both L1 and L2 chains and generates withdrawal proofs
├── <a href="./bridge-history-api/">bridge-history-api</a>: Bridge history service that collects deposit and withdraw events from both L1 and L2 chain and generates withdrawal proofs
├── <a href="./common/">common</a>: Common libraries and types
├── <a href="./coordinator/">coordinator</a>: Prover coordinator service that dispatches proving tasks to provers
├── <a href="./database">database</a>: Database client and schema definition

View File

@@ -7,7 +7,7 @@ The bridge-history-api contains three distinct components
### bridgehistoryapi-db-cli
Provide init, show version, rollback, and check status services of DB
Provide init, show version, rollback, check status services of DB
```
cd ./bridge-history-api
make bridgehistoryapi-db-cli
@@ -36,7 +36,7 @@ provides REST APIs. Please refer to the API details below.
1. `/api/txs`
```
// @Summary get all txs under the given address
// @Summary get all txs under given address
// @Accept plain
// @Produce plain
// @Param address query string true "wallet address"
@@ -60,7 +60,7 @@ provides REST APIs. Please refer to the API details below.
3. `/api/l2/unclaimed/withdrawals`
```
// @Summary get all L2 unclaimed withdrawals under the given address
// @Summary get all L2 unclaimed withdrawals under given address
// @Accept plain
// @Produce plain
// @Param address query string true "wallet address"

View File

@@ -1,11 +1,9 @@
package app
import (
"crypto/tls"
"fmt"
"os"
"os/signal"
"time"
"github.com/gin-gonic/gin"
"github.com/go-redis/redis/v8"
@@ -54,23 +52,11 @@ func action(ctx *cli.Context) error {
log.Error("failed to close db", "err", err)
}
}()
opts := &redis.Options{
Addr: cfg.Redis.Address,
Username: cfg.Redis.Username,
Password: cfg.Redis.Password,
MinIdleConns: cfg.Redis.MinIdleConns,
ReadTimeout: time.Duration(cfg.Redis.ReadTimeoutMs * int(time.Millisecond)),
}
// Production Redis service has enabled transit_encryption.
if !cfg.Redis.Local {
opts.TLSConfig = &tls.Config{
MinVersion: tls.VersionTLS12,
InsecureSkipVerify: true, //nolint:gosec
}
}
log.Info("init redis client", "addr", opts.Addr, "user name", opts.Username, "is local", cfg.Redis.Local,
"min idle connections", opts.MinIdleConns, "read timeout", opts.ReadTimeout)
redisClient := redis.NewClient(opts)
redisClient := redis.NewClient(&redis.Options{
Addr: cfg.Redis.Address,
Password: cfg.Redis.Password,
DB: cfg.Redis.DB,
})
api.InitController(db, redisClient)
router := gin.Default()

View File

@@ -68,10 +68,19 @@ func action(ctx *cli.Context) error {
observability.Server(ctx, db)
l1MessageFetcher := fetcher.NewL1MessageFetcher(subCtx, cfg.L1, db, l1Client)
// syncInfo is used to store the shared info between L1 fetcher and L2 fetcher, e.g., the sync height.
syncInfo := &fetcher.SyncInfo{}
l1MessageFetcher, err := fetcher.NewL1MessageFetcher(subCtx, cfg.L1, db, l1Client, syncInfo)
if err != nil {
log.Crit("failed to create L1 cross message fetcher", "error", err)
}
go l1MessageFetcher.Start()
l2MessageFetcher := fetcher.NewL2MessageFetcher(subCtx, cfg.L2, db, l2Client)
l2MessageFetcher, err := fetcher.NewL2MessageFetcher(subCtx, cfg.L2, db, l2Client, syncInfo)
if err != nil {
log.Crit("failed to create L2 cross message fetcher", "error", err)
}
go l2MessageFetcher.Start()
// Catch CTRL-C to ensure a graceful shutdown.

View File

@@ -1,40 +1,37 @@
{
"L1": {
"confirmation": 0,
"endpoint": "https://rpc.ankr.com/eth",
"startHeight": 18306000,
"confirmation": 2,
"endpoint": "L1-URL",
"startHeight": 4038000,
"blockTime": 12,
"fetchLimit": 16,
"MessengerAddr": "0x6774Bcbd5ceCeF1336b5300fb5186a12DDD8b367",
"ETHGatewayAddr": "0x7F2b8C31F88B6006c382775eea88297Ec1e3E905",
"WETHGatewayAddr": "0x7AC440cAe8EB6328de4fA621163a792c1EA9D4fE",
"StandardERC20GatewayAddr": "0xD8A791fE2bE73eb6E6cF1eb0cb3F36adC9B3F8f9",
"CustomERC20GatewayAddr": "0xb2b10a289A229415a124EFDeF310C10cb004B6ff",
"ERC721GatewayAddr": "0x6260aF48e8948617b8FA17F4e5CEa2d21D21554B",
"ERC1155GatewayAddr": "0xb94f7F6ABcb811c5Ac709dE14E37590fcCd975B6",
"USDCGatewayAddr": "0xf1AF3b23DE0A5Ca3CAb7261cb0061C0D779A5c7B",
"LIDOGatewayAddr": "0x6625C6332c9F91F2D27c304E729B86db87A3f504",
"DAIGatewayAddr": "0x67260A8B73C5B77B55c1805218A42A7A6F98F515",
"ScrollChainAddr": "0xa13BAF47339d63B743e7Da8741db5456DAc1E556",
"GatewayRouterAddr": "0xF8B1378579659D8F7EE5f3C929c2f3E332E41Fd6",
"MessageQueueAddr": "0x0d7E906BD9cAFa154b048cFa766Cc1E54E39AF9B"
"fetchLimit": 32,
"MessengerAddr": "0x50c7d3e7f7c656493D1D76aaa1a836CedfCBB16A",
"ETHGatewayAddr": "0x8A54A2347Da2562917304141ab67324615e9866d",
"WETHGatewayAddr": "0x3dA0BF44814cfC678376b3311838272158211695",
"StandardERC20GatewayAddr": "0x65D123d6389b900d954677c26327bfc1C3e88A13",
"CustomERC20GatewayAddr": "0x31C994F2017E71b82fd4D8118F140c81215bbb37",
"ERC721GatewayAddr": "0xEF27A5E63aa3f1B8312f744b9b4DcEB910Ba77AC",
"ERC1155GatewayAddr": "0xa5Df8530766A85936EE3E139dECE3bF081c83146",
"DAIGatewayAddr": "0x8b0B9c4e9f41b9bbDEfFee24F9f11C328093d248",
"ScrollChainAddr": "0x2D567EcE699Eabe5afCd141eDB7A4f2D0D6ce8a0",
"GatewayRouterAddr": "0x13FBE0D0e5552b8c9c4AE9e2435F38f37355998a",
"MessageQueueAddr": "0xF0B2293F5D834eAe920c6974D50957A1732de763"
},
"L2": {
"confirmation": 0,
"endpoint": "https://rpc.scroll.io",
"confirmation": 1,
"endpoint": "L2-URL",
"blockTime": 3,
"fetchLimit": 64,
"MessengerAddr": "0x781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC",
"ETHGatewayAddr": "0x6EA73e05AdC79974B931123675ea8F78FfdacDF0",
"WETHGatewayAddr": "0x7003E7B7186f0E6601203b99F7B8DECBfA391cf9",
"StandardERC20GatewayAddr": "0xE2b4795039517653c5Ae8C2A9BFdd783b48f447A",
"CustomERC20GatewayAddr": "0x64CCBE37c9A82D85A1F2E74649b7A42923067988",
"ERC721GatewayAddr": "0x7bC08E1c04fb41d75F1410363F0c5746Eae80582",
"ERC1155GatewayAddr": "0x62597Cc19703aF10B58feF87B0d5D29eFE263bcc",
"USDCGatewayAddr": "0x33B60d5Dd260d453cAC3782b0bDC01ce84672142",
"LIDOGatewayAddr": "0x8aE8f22226B9d789A36AC81474e633f8bE2856c9",
"DAIGatewayAddr": "0xaC78dff3A87b5b534e366A93E785a0ce8fA6Cc62",
"GatewayRouterAddr": "0x4C0926FF5252A435FD19e10ED15e5a249Ba19d79"
"fetchLimit": 128,
"MessengerAddr": "0xBa50f5340FB9F3Bd074bD638c9BE13eCB36E603d",
"ETHGatewayAddr": "0x91e8ADDFe1358aCa5314c644312d38237fC1101C",
"WETHGatewayAddr": "0x481B20A927206aF7A754dB8b904B052e2781ea27",
"StandardERC20GatewayAddr": "0xaDcA915971A336EA2f5b567e662F5bd74AEf9582",
"CustomERC20GatewayAddr": "0x058dec71E53079F9ED053F3a0bBca877F6f3eAcf",
"ERC721GatewayAddr": "0x179B9415194B67DC3c0b8760E075cD4415785c97",
"ERC1155GatewayAddr": "0xe17C9b9C66FAF07753cdB04316D09f52144612A5",
"DAIGatewayAddr": "0xbF28c28490988026Dca2396148DE50136A54534e",
"GatewayRouterAddr": "0x9aD3c5617eCAa556d6E166787A97081907171230",
"MessageQueueAddr": "0x5300000000000000000000000000000000000000"
},
"db": {
"dsn": "postgres://postgres:123456@localhost:5444/test?sslmode=disable",
@@ -44,10 +41,7 @@
},
"redis": {
"address": "localhost:6379",
"username": "default",
"password": "",
"local": true,
"minIdleConns": 10,
"readTimeoutMs": 500
"db": 0
}
}

View File

@@ -6,9 +6,10 @@ require (
github.com/gin-contrib/cors v1.5.0
github.com/gin-gonic/gin v1.9.1
github.com/go-redis/redis/v8 v8.11.5
github.com/google/uuid v1.4.0
github.com/pressly/goose/v3 v3.16.0
github.com/prometheus/client_golang v1.14.0
github.com/scroll-tech/go-ethereum v1.10.14-0.20240201173512-ae7cbae19c84
github.com/scroll-tech/go-ethereum v1.10.14-0.20231130005111-38a3a9c9198c
github.com/stretchr/testify v1.8.4
github.com/urfave/cli/v2 v2.25.7
golang.org/x/sync v0.5.0
@@ -18,20 +19,15 @@ require (
require (
github.com/VictoriaMetrics/fastcache v1.12.1 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.5.0 // indirect
github.com/btcsuite/btcd v0.20.1-beta // indirect
github.com/bytedance/sonic v1.10.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
github.com/chenzhuoyu/iasm v0.9.0 // indirect
github.com/consensys/bavard v0.1.13 // indirect
github.com/consensys/gnark-crypto v0.10.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
github.com/crate-crypto/go-kzg-4844 v0.7.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/deckarep/golang-set v1.8.0 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
@@ -44,7 +40,6 @@ require (
github.com/golang/protobuf v1.5.3 // indirect
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect
github.com/google/go-cmp v0.6.0 // indirect
github.com/google/uuid v1.4.0 // indirect
github.com/gorilla/websocket v1.5.0 // indirect
github.com/holiman/uint256 v1.2.4 // indirect
github.com/huin/goupnp v1.3.0 // indirect
@@ -60,7 +55,6 @@ require (
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.14 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mmcloughlin/addchain v0.4.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/onsi/gomega v1.27.1 // indirect
@@ -77,7 +71,6 @@ require (
github.com/sethvargo/go-retry v0.2.4 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect
github.com/status-im/keycard-go v0.2.0 // indirect
github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2 // indirect
github.com/tklauser/go-sysconf v0.3.12 // indirect
github.com/tklauser/numcpus v0.6.1 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
@@ -96,5 +89,4 @@ require (
google.golang.org/protobuf v1.31.0 // indirect
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
rsc.io/tmplfunc v0.0.3 // indirect
)

View File

@@ -10,8 +10,6 @@ github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156/go.mod h1:Cb/ax
github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sxfOI=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bits-and-blooms/bitset v1.5.0 h1:NpE8frKRLGHIcEzkR+gZhiioW1+WbYV6fKwD6ZIpQT8=
github.com/bits-and-blooms/bitset v1.5.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
github.com/btcsuite/btcd v0.20.1-beta h1:Ik4hyJqN8Jfyv3S4AGBOmyouMsYE3EdYODkMbQjwPGw=
github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA=
@@ -35,15 +33,9 @@ github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo=
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ=
github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI=
github.com/consensys/gnark-crypto v0.10.0 h1:zRh22SR7o4K35SoNqouS9J/TKHTyU2QWaj5ldehyXtA=
github.com/consensys/gnark-crypto v0.10.0/go.mod h1:Iq/P3HHl0ElSjsg2E1gsMwhAyxnxoKK5nVyZKd+/KhU=
github.com/containerd/continuity v0.4.3 h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8=
github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/crate-crypto/go-kzg-4844 v0.7.0 h1:C0vgZRk4q4EZ/JgPfzuSoxdCq3C3mOZMBShovmncxvA=
github.com/crate-crypto/go-kzg-4844 v0.7.0/go.mod h1:1kMhvPgI0Ky3yIa+9lFySEBUBXkYxeOi8ZF1sYioxhc=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -61,8 +53,6 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp
github.com/edsrzf/mmap-go v1.0.0 h1:CEBF7HpRnUCSJgGUb5h1Gm7e3VkmVDrR8lvWVLtrOFw=
github.com/elastic/go-sysinfo v1.11.1 h1:g9mwl05njS4r69TisC+vwHWTSKywZFYYUu3so3T/Lao=
github.com/elastic/go-windows v1.0.1 h1:AlYZOldA+UJ0/2nBuqWdo90GFCgG9xuyw9SYzGUtJm0=
github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4 h1:B2mpK+MNqgPqk2/KNi1LbqwtZDy5F7iy0mynQiBr8VA=
github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4/go.mod h1:y4GA2JbAUama1S4QwYjC2hefgGLU8Ul0GMtL/ADMF1c=
github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5 h1:FtmdgXiUlNeRsoNMFlKLDt+S+6hbjVMEW6RGQ7aUf7c=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
@@ -109,7 +99,6 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
@@ -166,9 +155,6 @@ github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zk
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/pointerstructure v1.2.0 h1:O+i9nHnXS3l/9Wu7r4NrEdwA2VFTicjUEN1uBnDo34A=
github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY=
github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU=
github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU=
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
@@ -218,8 +204,8 @@ github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncj
github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240201173512-ae7cbae19c84 h1:H3tMatapNGkOWnlXpp9HSjcKN00684jkutxqrJHU+qM=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240201173512-ae7cbae19c84/go.mod h1:7Rz2bh9pn42rGuxjh51CG7HL9SKMG3ZugJkL3emdZx8=
github.com/scroll-tech/go-ethereum v1.10.14-0.20231130005111-38a3a9c9198c h1:MnAdt80steCDli4SAD0J0spBGNY+gQvbdptNjWztHcw=
github.com/scroll-tech/go-ethereum v1.10.14-0.20231130005111-38a3a9c9198c/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc=
github.com/scroll-tech/zktrie v0.7.1 h1:NrmZNjuBzsbrKePqdHDG+t2cXnimbtezPAFS0+L9ElE=
github.com/scroll-tech/zktrie v0.7.1/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk=
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
@@ -242,8 +228,6 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2 h1:wh1wzwAhZBNiZO37uWS/nDaKiIwHz4mDo4pnA+fqTO0=
github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw=
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY=
github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
@@ -343,5 +327,3 @@ modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
rsc.io/tmplfunc v0.0.3 h1:53XFQh69AfOa8Tw0Jm7t+GV7KZhOi6jzsCzTtKbMvzU=
rsc.io/tmplfunc v0.0.3/go.mod h1:AG3sTPzElb1Io3Yg4voV9AGZJuleGAwaVRxL9M49PhA=

View File

@@ -8,11 +8,11 @@ import (
"scroll-tech/common/database"
)
// FetcherConfig is the configuration of Layer1 or Layer2 fetcher.
type FetcherConfig struct {
// LayerConfig is the configuration of Layer1/Layer2
type LayerConfig struct {
Confirmation uint64 `json:"confirmation"`
Endpoint string `json:"endpoint"`
StartHeight uint64 `json:"startHeight"` // Can only be configured to contract deployment height, message proof should be updated from the very beginning.
StartHeight uint64 `json:"startHeight"`
BlockTime int64 `json:"blockTime"`
FetchLimit uint64 `json:"fetchLimit"`
MessengerAddr string `json:"MessengerAddr"`
@@ -32,19 +32,15 @@ type FetcherConfig struct {
// RedisConfig redis config
type RedisConfig struct {
Address string `json:"address"`
Username string `json:"username"`
Password string `json:"password"`
DB int `json:"db"`
Local bool `json:"local"`
MinIdleConns int `json:"minIdleConns"`
ReadTimeoutMs int `json:"readTimeoutMs"`
Address string `json:"address"`
Password string `json:"password"`
DB int `json:"db"`
}
// Config is the configuration of the bridge history backend
type Config struct {
L1 *FetcherConfig `json:"L1"`
L2 *FetcherConfig `json:"L2"`
L1 *LayerConfig `json:"L1"`
L2 *LayerConfig `json:"L2"`
DB *database.Config `json:"db"`
Redis *RedisConfig `json:"redis"`
}

View File

@@ -35,7 +35,7 @@ func (c *HistoryController) GetL2UnclaimedWithdrawalsByAddress(ctx *gin.Context)
return
}
resultData := &types.ResultData{Results: pagedTxs, Total: total}
resultData := &types.ResultData{Result: pagedTxs, Total: total}
types.RenderSuccess(ctx, resultData)
}
@@ -53,7 +53,7 @@ func (c *HistoryController) GetL2WithdrawalsByAddress(ctx *gin.Context) {
return
}
resultData := &types.ResultData{Results: pagedTxs, Total: total}
resultData := &types.ResultData{Result: pagedTxs, Total: total}
types.RenderSuccess(ctx, resultData)
}
@@ -71,7 +71,7 @@ func (c *HistoryController) GetTxsByAddress(ctx *gin.Context) {
return
}
resultData := &types.ResultData{Results: pagedTxs, Total: total}
resultData := &types.ResultData{Result: pagedTxs, Total: total}
types.RenderSuccess(ctx, resultData)
}
@@ -89,6 +89,6 @@ func (c *HistoryController) PostQueryTxsByHashes(ctx *gin.Context) {
return
}
resultData := &types.ResultData{Results: results, Total: uint64(len(results))}
resultData := &types.ResultData{Result: results, Total: uint64(len(results))}
types.RenderSuccess(ctx, resultData)
}

View File

@@ -2,12 +2,8 @@ package fetcher
import (
"context"
"math/big"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/scroll-tech/go-ethereum/log"
"gorm.io/gorm"
@@ -20,78 +16,44 @@ import (
// L1MessageFetcher fetches cross message events from L1 and saves them to database.
type L1MessageFetcher struct {
ctx context.Context
cfg *config.FetcherConfig
cfg *config.LayerConfig
client *ethclient.Client
l1SyncHeight uint64
l1LastSyncBlockHash common.Hash
syncInfo *SyncInfo
l1ScanHeight uint64
eventUpdateLogic *logic.EventUpdateLogic
l1FetcherLogic *logic.L1FetcherLogic
l1MessageFetcherRunningTotal prometheus.Counter
l1MessageFetcherReorgTotal prometheus.Counter
l1MessageFetcherSyncHeight prometheus.Gauge
}
// NewL1MessageFetcher creates a new L1MessageFetcher instance.
func NewL1MessageFetcher(ctx context.Context, cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) *L1MessageFetcher {
c := &L1MessageFetcher{
func NewL1MessageFetcher(ctx context.Context, cfg *config.LayerConfig, db *gorm.DB, client *ethclient.Client, syncInfo *SyncInfo) (*L1MessageFetcher, error) {
return &L1MessageFetcher{
ctx: ctx,
cfg: cfg,
client: client,
eventUpdateLogic: logic.NewEventUpdateLogic(db, true),
syncInfo: syncInfo,
eventUpdateLogic: logic.NewEventUpdateLogic(db),
l1FetcherLogic: logic.NewL1FetcherLogic(cfg, db, client),
}
reg := prometheus.DefaultRegisterer
c.l1MessageFetcherRunningTotal = promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "L1_message_fetcher_running_total",
Help: "Current count of running L1 message fetcher instances.",
})
c.l1MessageFetcherReorgTotal = promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "L1_message_fetcher_reorg_total",
Help: "Total count of blockchain reorgs encountered by the L1 message fetcher.",
})
c.l1MessageFetcherSyncHeight = promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "L1_message_fetcher_sync_height",
Help: "Latest blockchain height the L1 message fetcher has synced with.",
})
return c
}, nil
}
// Start starts the L1 message fetching process.
func (c *L1MessageFetcher) Start() {
messageSyncedHeight, batchSyncedHeight, dbErr := c.eventUpdateLogic.GetL1SyncHeight(c.ctx)
if dbErr != nil {
log.Crit("L1MessageFetcher start failed", "err", dbErr)
}
l1SyncHeight := messageSyncedHeight
if batchSyncedHeight > l1SyncHeight {
l1SyncHeight = batchSyncedHeight
}
if c.cfg.StartHeight > l1SyncHeight {
l1SyncHeight = c.cfg.StartHeight - 1
}
// Sync from an older block to prevent reorg during restart.
if l1SyncHeight < logic.L1ReorgSafeDepth {
l1SyncHeight = 0
} else {
l1SyncHeight -= logic.L1ReorgSafeDepth
}
header, err := c.client.HeaderByNumber(c.ctx, new(big.Int).SetUint64(l1SyncHeight))
messageSyncedHeight, batchSyncedHeight, err := c.eventUpdateLogic.GetL1SyncHeight(c.ctx)
if err != nil {
log.Crit("failed to get L1 header by number", "block number", l1SyncHeight, "err", err)
return
log.Crit("L1MessageFetcher start failed", "error", err)
}
c.updateL1SyncHeight(l1SyncHeight, header.Hash())
c.l1ScanHeight = messageSyncedHeight
if batchSyncedHeight > c.l1ScanHeight {
c.l1ScanHeight = batchSyncedHeight
}
if c.cfg.StartHeight > c.l1ScanHeight {
c.l1ScanHeight = c.cfg.StartHeight - 1
}
log.Info("Start L1 message fetcher", "message synced height", messageSyncedHeight, "batch synced height", batchSyncedHeight, "config start height", c.cfg.StartHeight, "sync start height", c.l1SyncHeight+1)
log.Info("Start L1 message fetcher", "message synced height", messageSyncedHeight, "batch synced height", batchSyncedHeight, "config start height", c.cfg.StartHeight)
tick := time.NewTicker(time.Duration(c.cfg.BlockTime) * time.Second)
go func() {
@@ -108,14 +70,13 @@ func (c *L1MessageFetcher) Start() {
}
func (c *L1MessageFetcher) fetchAndSaveEvents(confirmation uint64) {
startHeight := c.l1SyncHeight + 1
endHeight, rpcErr := utils.GetBlockNumber(c.ctx, c.client, confirmation)
if rpcErr != nil {
log.Error("failed to get L1 block number", "confirmation", confirmation, "err", rpcErr)
startHeight := c.l1ScanHeight + 1
endHeight, err := utils.GetBlockNumber(c.ctx, c.client, confirmation)
if err != nil {
log.Error("failed to get L1 safe block number", "err", err)
return
}
log.Info("fetch and save missing L1 events", "start height", startHeight, "end height", endHeight, "confirmation", confirmation)
log.Info("fetch and save missing L1 events", "start height", startHeight, "end height", endHeight)
for from := startHeight; from <= endHeight; from += c.cfg.FetchLimit {
to := from + c.cfg.FetchLimit - 1
@@ -123,32 +84,27 @@ func (c *L1MessageFetcher) fetchAndSaveEvents(confirmation uint64) {
to = endHeight
}
isReorg, resyncHeight, lastBlockHash, l1FetcherResult, fetcherErr := c.l1FetcherLogic.L1Fetcher(c.ctx, from, to, c.l1LastSyncBlockHash)
fetcherResult, fetcherErr := c.l1FetcherLogic.L1Fetcher(c.ctx, from, to)
if fetcherErr != nil {
log.Error("failed to fetch L1 events", "from", from, "to", to, "err", fetcherErr)
log.Error("failed to fetch L1 events", "from", from, "to", to, "err", err)
return
}
if isReorg {
c.l1MessageFetcherReorgTotal.Inc()
log.Warn("L1 reorg happened, exit and re-enter fetchAndSaveEvents", "re-sync height", resyncHeight)
c.updateL1SyncHeight(resyncHeight, lastBlockHash)
c.l1MessageFetcherRunningTotal.Inc()
if insertUpdateErr := c.eventUpdateLogic.L1InsertOrUpdate(c.ctx, fetcherResult); insertUpdateErr != nil {
log.Error("failed to save L1 events", "from", from, "to", to, "err", err)
return
}
c.l1ScanHeight = to
l2ScannedHeight := c.syncInfo.GetL2ScanHeight()
if l2ScannedHeight == 0 {
log.Error("L2 fetcher has not successfully synced at least one round yet")
return
}
if insertUpdateErr := c.eventUpdateLogic.L1InsertOrUpdate(c.ctx, l1FetcherResult); insertUpdateErr != nil {
log.Error("failed to save L1 events", "from", from, "to", to, "err", insertUpdateErr)
if updateErr := c.eventUpdateLogic.UpdateL1BatchIndexAndStatus(c.ctx, l2ScannedHeight); updateErr != nil {
log.Error("failed to update L1 batch index and status", "from", from, "to", to, "err", err)
return
}
c.updateL1SyncHeight(to, lastBlockHash)
c.l1MessageFetcherRunningTotal.Inc()
}
}
func (c *L1MessageFetcher) updateL1SyncHeight(height uint64, blockHash common.Hash) {
c.l1MessageFetcherSyncHeight.Set(float64(height))
c.l1LastSyncBlockHash = blockHash
c.l1SyncHeight = height
}

View File

@@ -2,11 +2,10 @@ package fetcher
import (
"context"
"fmt"
"math/big"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/scroll-tech/go-ethereum/log"
@@ -14,79 +13,45 @@ import (
"scroll-tech/bridge-history-api/internal/config"
"scroll-tech/bridge-history-api/internal/logic"
"scroll-tech/bridge-history-api/internal/orm"
"scroll-tech/bridge-history-api/internal/utils"
)
// L2MessageFetcher fetches cross message events from L2 and saves them to database.
type L2MessageFetcher struct {
ctx context.Context
cfg *config.FetcherConfig
db *gorm.DB
client *ethclient.Client
l2SyncHeight uint64
l2LastSyncBlockHash common.Hash
ctx context.Context
cfg *config.LayerConfig
db *gorm.DB
client *ethclient.Client
syncInfo *SyncInfo
eventUpdateLogic *logic.EventUpdateLogic
l2FetcherLogic *logic.L2FetcherLogic
l2MessageFetcherRunningTotal prometheus.Counter
l2MessageFetcherReorgTotal prometheus.Counter
l2MessageFetcherSyncHeight prometheus.Gauge
}
// NewL2MessageFetcher creates a new L2MessageFetcher instance.
func NewL2MessageFetcher(ctx context.Context, cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) *L2MessageFetcher {
c := &L2MessageFetcher{
func NewL2MessageFetcher(ctx context.Context, cfg *config.LayerConfig, db *gorm.DB, client *ethclient.Client, syncInfo *SyncInfo) (*L2MessageFetcher, error) {
return &L2MessageFetcher{
ctx: ctx,
cfg: cfg,
db: db,
syncInfo: syncInfo,
client: client,
eventUpdateLogic: logic.NewEventUpdateLogic(db, false),
eventUpdateLogic: logic.NewEventUpdateLogic(db),
l2FetcherLogic: logic.NewL2FetcherLogic(cfg, db, client),
}
reg := prometheus.DefaultRegisterer
c.l2MessageFetcherRunningTotal = promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "L2_message_fetcher_running_total",
Help: "Current count of running L2 message fetcher instances.",
})
c.l2MessageFetcherReorgTotal = promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "L2_message_fetcher_reorg_total",
Help: "Total count of blockchain reorgs encountered by the L2 message fetcher.",
})
c.l2MessageFetcherSyncHeight = promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "L2_message_fetcher_sync_height",
Help: "Latest blockchain height the L2 message fetcher has synced with.",
})
return c
}, nil
}
// Start starts the L2 message fetching process.
func (c *L2MessageFetcher) Start() {
l2SentMessageSyncedHeight, dbErr := c.eventUpdateLogic.GetL2MessageSyncedHeightInDB(c.ctx)
if dbErr != nil {
log.Crit("failed to get L2 cross message processed height", "err", dbErr)
return
}
l2SyncHeight := l2SentMessageSyncedHeight
// Sync from an older block to prevent reorg during restart.
if l2SyncHeight < logic.L2ReorgSafeDepth {
l2SyncHeight = 0
} else {
l2SyncHeight -= logic.L2ReorgSafeDepth
}
header, err := c.client.HeaderByNumber(c.ctx, new(big.Int).SetUint64(l2SyncHeight))
l2SentMessageSyncedHeight, err := c.eventUpdateLogic.GetL2MessageSyncedHeightInDB(c.ctx)
if err != nil {
log.Crit("failed to get L2 header by number", "block number", l2SyncHeight, "err", err)
log.Error("failed to get L2 cross message processed height", "err", err)
return
}
c.updateL2SyncHeight(l2SyncHeight, header.Hash())
log.Info("Start L2 message fetcher", "message synced height", l2SentMessageSyncedHeight, "sync start height", l2SyncHeight+1)
c.syncInfo.SetL2ScanHeight(l2SentMessageSyncedHeight)
log.Info("Start L2 message fetcher", "message synced height", l2SentMessageSyncedHeight)
tick := time.NewTicker(time.Duration(c.cfg.BlockTime) * time.Second)
go func() {
@@ -103,13 +68,13 @@ func (c *L2MessageFetcher) Start() {
}
func (c *L2MessageFetcher) fetchAndSaveEvents(confirmation uint64) {
startHeight := c.l2SyncHeight + 1
endHeight, rpcErr := utils.GetBlockNumber(c.ctx, c.client, confirmation)
if rpcErr != nil {
log.Error("failed to get L2 block number", "confirmation", confirmation, "err", rpcErr)
startHeight := c.syncInfo.GetL2ScanHeight() + 1
endHeight, err := utils.GetBlockNumber(c.ctx, c.client, confirmation)
if err != nil {
log.Error("failed to get L1 safe block number", "err", err)
return
}
log.Info("fetch and save missing L2 events", "start height", startHeight, "end height", endHeight, "confirmation", confirmation)
log.Info("fetch and save missing L2 events", "start height", startHeight, "end height", endHeight)
for from := startHeight; from <= endHeight; from += c.cfg.FetchLimit {
to := from + c.cfg.FetchLimit - 1
@@ -117,37 +82,67 @@ func (c *L2MessageFetcher) fetchAndSaveEvents(confirmation uint64) {
to = endHeight
}
isReorg, resyncHeight, lastBlockHash, l2FetcherResult, fetcherErr := c.l2FetcherLogic.L2Fetcher(c.ctx, from, to, c.l2LastSyncBlockHash)
if fetcherErr != nil {
log.Error("failed to fetch L2 events", "from", from, "to", to, "err", fetcherErr)
l2FilterResult, err := c.l2FetcherLogic.L2Fetcher(c.ctx, from, to)
if err != nil {
log.Error("failed to fetch L2 events", "from", from, "to", to, "err", err)
return
}
if isReorg {
c.l2MessageFetcherReorgTotal.Inc()
log.Warn("L2 reorg happened, exit and re-enter fetchAndSaveEvents", "re-sync height", resyncHeight)
c.updateL2SyncHeight(resyncHeight, lastBlockHash)
c.l2MessageFetcherRunningTotal.Inc()
if updateWithdrawErr := c.updateL2WithdrawMessageProofs(c.ctx, l2FilterResult.WithdrawMessages, to); updateWithdrawErr != nil {
log.Error("failed to update L2 withdraw message", "from", from, "to", to, "err", err)
return
}
if insertUpdateErr := c.eventUpdateLogic.L2InsertOrUpdate(c.ctx, l2FetcherResult); insertUpdateErr != nil {
log.Error("failed to save L2 events", "from", from, "to", to, "err", insertUpdateErr)
if insertUpdateErr := c.eventUpdateLogic.L2InsertOrUpdate(c.ctx, l2FilterResult); insertUpdateErr != nil {
log.Error("failed to save L2 events", "from", from, "to", to, "err", err)
return
}
if updateErr := c.eventUpdateLogic.UpdateL1BatchIndexAndStatus(c.ctx, c.l2SyncHeight); updateErr != nil {
log.Error("failed to update L1 batch index and status", "from", from, "to", to, "err", updateErr)
return
}
c.updateL2SyncHeight(to, lastBlockHash)
c.l2MessageFetcherRunningTotal.Inc()
c.syncInfo.SetL2ScanHeight(to)
}
}
func (c *L2MessageFetcher) updateL2SyncHeight(height uint64, blockHash common.Hash) {
c.l2MessageFetcherSyncHeight.Set(float64(height))
c.l2LastSyncBlockHash = blockHash
c.l2SyncHeight = height
func (c *L2MessageFetcher) updateL2WithdrawMessageProofs(ctx context.Context, l2WithdrawMessages []*orm.CrossMessage, endBlock uint64) error {
withdrawTrie := utils.NewWithdrawTrie()
message, err := c.eventUpdateLogic.GetL2LatestWithdrawal(ctx)
if err != nil {
log.Error("failed to get latest L2 sent message event", "err", err)
return err
}
if message != nil {
withdrawTrie.Initialize(message.MessageNonce, common.HexToHash(message.MessageHash), message.MerkleProof)
}
messageHashes := make([]common.Hash, len(l2WithdrawMessages))
for i, message := range l2WithdrawMessages {
messageHashes[i] = common.HexToHash(message.MessageHash)
}
for i, messageHash := range messageHashes {
proof := withdrawTrie.AppendMessages([]common.Hash{messageHash})
if err != nil {
log.Error("error generating proof", "messageHash", messageHash, "error", err)
return fmt.Errorf("error generating proof for messageHash %s: %v", messageHash, err)
}
if len(proof) != 1 {
log.Error("invalid proof len", "got", len(proof), "expected", 1)
return fmt.Errorf("invalid proof len, got: %v, expected: 1", len(proof))
}
l2WithdrawMessages[i].MerkleProof = proof[0]
}
// Verify if local info is correct.
withdrawRoot, err := c.client.StorageAt(ctx, common.HexToAddress(c.cfg.MessageQueueAddr), common.Hash{}, new(big.Int).SetUint64(endBlock))
if err != nil {
log.Error("failed to get withdraw root", "number", endBlock, "error", err)
return fmt.Errorf("failed to get withdraw root: %v, number: %v", err, endBlock)
}
if common.BytesToHash(withdrawRoot) != withdrawTrie.MessageRoot() {
log.Error("withdraw root mismatch", "expected", common.BytesToHash(withdrawRoot).String(), "got", withdrawTrie.MessageRoot().String())
return fmt.Errorf("withdraw root mismatch. expected: %v, got: %v", common.BytesToHash(withdrawRoot), withdrawTrie.MessageRoot())
}
return nil
}

View File

@@ -0,0 +1,18 @@
package fetcher
import "sync/atomic"
// SyncInfo is a struct that stores synchronization information shared between L1 fetcher and L2 fetcher.
type SyncInfo struct {
l2ScanHeight uint64
}
// SetL2ScanHeight is a method that sets the value of l2ScanHeight in SyncInfo.
func (s *SyncInfo) SetL2ScanHeight(height uint64) {
atomic.StoreUint64(&s.l2ScanHeight, height)
}
// GetL2ScanHeight is a method that retrieves the value of l2ScanHeight in SyncInfo.
func (s *SyncInfo) GetL2ScanHeight() uint64 {
return atomic.LoadUint64(&s.l2ScanHeight)
}

View File

@@ -2,16 +2,11 @@ package logic
import (
"context"
"fmt"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"gorm.io/gorm"
"scroll-tech/bridge-history-api/internal/orm"
"scroll-tech/bridge-history-api/internal/utils"
)
// EventUpdateLogic the logic of insert/update the database
@@ -19,35 +14,18 @@ type EventUpdateLogic struct {
db *gorm.DB
crossMessageOrm *orm.CrossMessage
batchEventOrm *orm.BatchEvent
eventUpdateLogicL1FinalizeBatchEventL2BlockUpdateHeight prometheus.Gauge
eventUpdateLogicL2MessageNonceUpdateHeight prometheus.Gauge
}
// NewEventUpdateLogic creates a EventUpdateLogic instance
func NewEventUpdateLogic(db *gorm.DB, isL1 bool) *EventUpdateLogic {
b := &EventUpdateLogic{
// NewEventUpdateLogic create a EventUpdateLogic instance
func NewEventUpdateLogic(db *gorm.DB) *EventUpdateLogic {
return &EventUpdateLogic{
db: db,
crossMessageOrm: orm.NewCrossMessage(db),
batchEventOrm: orm.NewBatchEvent(db),
}
if !isL1 {
reg := prometheus.DefaultRegisterer
b.eventUpdateLogicL1FinalizeBatchEventL2BlockUpdateHeight = promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "event_update_logic_L1_finalize_batch_event_L2_block_update_height",
Help: "L2 block height of the latest L1 batch event that has been finalized and updated in the message_table.",
})
b.eventUpdateLogicL2MessageNonceUpdateHeight = promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "event_update_logic_L2_message_nonce_update_height",
Help: "L2 message nonce height in the latest L1 batch event that has been finalized and updated in the message_table.",
})
}
return b
}
// GetL1SyncHeight gets the l1 sync height from db
// GetL1SyncHeight get the l1 sync height from db
func (b *EventUpdateLogic) GetL1SyncHeight(ctx context.Context) (uint64, uint64, error) {
messageSyncedHeight, err := b.crossMessageOrm.GetMessageSyncedHeightInDB(ctx, orm.MessageTypeL1SentMessage)
if err != nil {
@@ -64,7 +42,7 @@ func (b *EventUpdateLogic) GetL1SyncHeight(ctx context.Context) (uint64, uint64,
return messageSyncedHeight, batchSyncedHeight, nil
}
// GetL2MessageSyncedHeightInDB gets L2 messages synced height
// GetL2MessageSyncedHeightInDB get L2 messages synced height
func (b *EventUpdateLogic) GetL2MessageSyncedHeightInDB(ctx context.Context) (uint64, error) {
l2SentMessageSyncedHeight, err := b.crossMessageOrm.GetMessageSyncedHeightInDB(ctx, orm.MessageTypeL2SentMessage)
if err != nil {
@@ -74,121 +52,101 @@ func (b *EventUpdateLogic) GetL2MessageSyncedHeightInDB(ctx context.Context) (ui
return l2SentMessageSyncedHeight, nil
}
// L1InsertOrUpdate inserts or updates l1 messages
// GetL2LatestWithdrawal get L2 latest withdrawal message
func (b *EventUpdateLogic) GetL2LatestWithdrawal(ctx context.Context) (*orm.CrossMessage, error) {
message, err := b.crossMessageOrm.GetLatestL2Withdrawal(ctx)
if err != nil {
log.Error("failed to get latest L2 sent message event", "err", err)
return nil, err
}
return message, nil
}
// L1InsertOrUpdate insert or update l1 messages
func (b *EventUpdateLogic) L1InsertOrUpdate(ctx context.Context, l1FetcherResult *L1FilterResult) error {
if err := b.crossMessageOrm.InsertOrUpdateL1Messages(ctx, l1FetcherResult.DepositMessages); err != nil {
log.Error("failed to insert L1 deposit messages", "err", err)
return err
}
err := b.db.Transaction(func(tx *gorm.DB) error {
if txErr := b.crossMessageOrm.InsertOrUpdateL1Messages(ctx, l1FetcherResult.DepositMessages, tx); txErr != nil {
log.Error("failed to insert L1 deposit messages", "err", txErr)
return txErr
}
if err := b.crossMessageOrm.InsertOrUpdateL1RelayedMessagesOfL2Withdrawals(ctx, l1FetcherResult.RelayedMessages); err != nil {
log.Error("failed to update L1 relayed messages of L2 withdrawals", "err", err)
return err
}
if txErr := b.crossMessageOrm.InsertOrUpdateL1RelayedMessagesOfL2Withdrawals(ctx, l1FetcherResult.RelayedMessages, tx); txErr != nil {
log.Error("failed to update L1 relayed messages of L2 withdrawals", "err", txErr)
return txErr
}
if err := b.batchEventOrm.InsertOrUpdateBatchEvents(ctx, l1FetcherResult.BatchEvents); err != nil {
log.Error("failed to insert or update batch events", "err", err)
return err
}
if txErr := b.batchEventOrm.InsertOrUpdateBatchEvents(ctx, l1FetcherResult.BatchEvents, tx); txErr != nil {
log.Error("failed to insert or update batch events", "err", txErr)
return txErr
}
if err := b.crossMessageOrm.UpdateL1MessageQueueEventsInfo(ctx, l1FetcherResult.MessageQueueEvents); err != nil {
log.Error("failed to insert L1 message queue events", "err", err)
return err
}
if txErr := b.crossMessageOrm.UpdateL1MessageQueueEventsInfo(ctx, l1FetcherResult.MessageQueueEvents, tx); txErr != nil {
log.Error("failed to insert L1 message queue events", "err", txErr)
return txErr
}
if err := b.crossMessageOrm.InsertFailedL1GatewayTxs(ctx, l1FetcherResult.RevertedTxs); err != nil {
log.Error("failed to insert failed L1 gateway transactions", "err", err)
return err
}
return nil
}
func (b *EventUpdateLogic) updateL2WithdrawMessageInfos(ctx context.Context, batchIndex, startBlock, endBlock uint64) error {
l2WithdrawMessages, err := b.crossMessageOrm.GetL2WithdrawalsByBlockRange(ctx, startBlock, endBlock)
if err != nil {
log.Error("failed to get L2 withdrawals by batch index", "batch index", batchIndex, "err", err)
return err
}
if len(l2WithdrawMessages) == 0 {
if txErr := b.crossMessageOrm.InsertFailedGatewayRouterTxs(ctx, l1FetcherResult.FailedGatewayRouterTxs, tx); txErr != nil {
log.Error("failed to insert L1 failed gateway router transactions", "err", txErr)
return txErr
}
return nil
}
})
withdrawTrie := utils.NewWithdrawTrie()
lastMessage, err := b.crossMessageOrm.GetL2LatestFinalizedWithdrawal(ctx)
if err != nil {
log.Error("failed to get latest L2 finalized sent message event", "err", err)
log.Error("failed to update db of L1 events", "err", err)
return err
}
if lastMessage != nil {
withdrawTrie.Initialize(lastMessage.MessageNonce, common.HexToHash(lastMessage.MessageHash), lastMessage.MerkleProof)
}
if withdrawTrie.NextMessageNonce != l2WithdrawMessages[0].MessageNonce {
log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actuall next message nonce", l2WithdrawMessages[0].MessageNonce)
return fmt.Errorf("nonce mismatch")
}
messageHashes := make([]common.Hash, len(l2WithdrawMessages))
for i, message := range l2WithdrawMessages {
messageHashes[i] = common.HexToHash(message.MessageHash)
}
proofs := withdrawTrie.AppendMessages(messageHashes)
for i, message := range l2WithdrawMessages {
message.MerkleProof = proofs[i]
message.RollupStatus = int(orm.RollupStatusTypeFinalized)
message.BatchIndex = batchIndex
}
if dbErr := b.crossMessageOrm.UpdateBatchIndexRollupStatusMerkleProofOfL2Messages(ctx, l2WithdrawMessages); dbErr != nil {
log.Error("failed to update batch index and rollup status and merkle proof of L2 messages", "err", dbErr)
return dbErr
}
b.eventUpdateLogicL2MessageNonceUpdateHeight.Set(float64(withdrawTrie.NextMessageNonce - 1))
return nil
}
// UpdateL1BatchIndexAndStatus updates L1 finalized batch index and status
// UpdateL1BatchIndexAndStatus update l1 batch index and status
func (b *EventUpdateLogic) UpdateL1BatchIndexAndStatus(ctx context.Context, height uint64) error {
finalizedBatches, err := b.batchEventOrm.GetFinalizedBatchesLEBlockHeight(ctx, height)
batches, err := b.batchEventOrm.GetBatchesLEBlockHeight(ctx, height)
if err != nil {
log.Error("failed to get batches >= block height", "error", err)
return err
}
for _, finalizedBatch := range finalizedBatches {
log.Info("update finalized batch info of L2 withdrawals", "index", finalizedBatch.BatchIndex, "start", finalizedBatch.StartBlockNumber, "end", finalizedBatch.EndBlockNumber)
if updateErr := b.updateL2WithdrawMessageInfos(ctx, finalizedBatch.BatchIndex, finalizedBatch.StartBlockNumber, finalizedBatch.EndBlockNumber); updateErr != nil {
log.Error("failed to update L2 withdraw message infos", "index", finalizedBatch.BatchIndex, "start", finalizedBatch.StartBlockNumber, "end", finalizedBatch.EndBlockNumber, "error", updateErr)
return updateErr
}
if dbErr := b.batchEventOrm.UpdateBatchEventStatus(ctx, finalizedBatch.BatchIndex); dbErr != nil {
log.Error("failed to update batch event status as updated", "index", finalizedBatch.BatchIndex, "start", finalizedBatch.StartBlockNumber, "end", finalizedBatch.EndBlockNumber, "error", dbErr)
for _, batch := range batches {
log.Info("update batch info of L2 withdrawals", "index", batch.BatchIndex, "start", batch.StartBlockNumber, "end", batch.EndBlockNumber)
if dbErr := b.crossMessageOrm.UpdateBatchStatusOfL2Withdrawals(ctx, batch.StartBlockNumber, batch.EndBlockNumber, batch.BatchIndex); dbErr != nil {
log.Error("failed to update batch status of L2 sent messages", "start", batch.StartBlockNumber, "end", batch.EndBlockNumber, "index", batch.BatchIndex, "error", dbErr)
return dbErr
}
if dbErr := b.batchEventOrm.UpdateBatchEventStatus(ctx, batch.BatchIndex); dbErr != nil {
log.Error("failed to update batch event status as updated", "start", batch.StartBlockNumber, "end", batch.EndBlockNumber, "index", batch.BatchIndex, "error", dbErr)
return dbErr
}
b.eventUpdateLogicL1FinalizeBatchEventL2BlockUpdateHeight.Set(float64(finalizedBatch.EndBlockNumber))
}
return nil
}
// L2InsertOrUpdate inserts or updates L2 messages
// L2InsertOrUpdate insert or update L2 messages
func (b *EventUpdateLogic) L2InsertOrUpdate(ctx context.Context, l2FetcherResult *L2FilterResult) error {
if err := b.crossMessageOrm.InsertOrUpdateL2Messages(ctx, l2FetcherResult.WithdrawMessages); err != nil {
log.Error("failed to insert L2 withdrawal messages", "err", err)
return err
}
err := b.db.Transaction(func(tx *gorm.DB) error {
if txErr := b.crossMessageOrm.InsertOrUpdateL2Messages(ctx, l2FetcherResult.WithdrawMessages, tx); txErr != nil {
log.Error("failed to insert L2 withdrawal messages", "err", txErr)
return txErr
}
if txErr := b.crossMessageOrm.InsertOrUpdateL2RelayedMessagesOfL1Deposits(ctx, l2FetcherResult.RelayedMessages, tx); txErr != nil {
log.Error("failed to update L2 relayed messages of L1 deposits", "err", txErr)
return txErr
}
if txErr := b.crossMessageOrm.InsertOrUpdateL2RevertedRelayedMessagesOfL1Deposits(ctx, l2FetcherResult.RevertedRelayedMessages, tx); txErr != nil {
log.Error("failed to update L2 relayed messages of L1 deposits", "err", txErr)
return txErr
}
if txErr := b.crossMessageOrm.InsertFailedGatewayRouterTxs(ctx, l2FetcherResult.FailedGatewayRouterTxs, tx); txErr != nil {
log.Error("failed to insert L2 failed gateway router transactions", "err", txErr)
return txErr
}
return nil
})
if err := b.crossMessageOrm.InsertOrUpdateL2RelayedMessagesOfL1Deposits(ctx, l2FetcherResult.RelayedMessages); err != nil {
log.Error("failed to update L2 relayed messages of L1 deposits", "err", err)
return err
}
if err := b.crossMessageOrm.InsertFailedL2GatewayTxs(ctx, l2FetcherResult.OtherRevertedTxs); err != nil {
log.Error("failed to insert failed L2 gateway transactions", "err", err)
if err != nil {
log.Error("failed to update db of L2 events", "err", err)
return err
}
return nil

View File

@@ -16,20 +16,13 @@ import (
"scroll-tech/bridge-history-api/internal/orm"
"scroll-tech/bridge-history-api/internal/types"
"scroll-tech/bridge-history-api/internal/utils"
)
const (
// cacheKeyPrefixBridgeHistory serves as a specific namespace for all Redis cache keys
// associated with the 'bridge-history' user. This prefix is used to enforce access controls
// in Redis, allowing permissions to be set such that only users with the appropriate
// access rights can read or write to keys starting with "bridge-history".
cacheKeyPrefixBridgeHistory = "bridge-history-"
cacheKeyPrefixL2ClaimableWithdrawalsByAddr = cacheKeyPrefixBridgeHistory + "l2ClaimableWithdrawalsByAddr:"
cacheKeyPrefixL2WithdrawalsByAddr = cacheKeyPrefixBridgeHistory + "l2WithdrawalsByAddr:"
cacheKeyPrefixTxsByAddr = cacheKeyPrefixBridgeHistory + "txsByAddr:"
cacheKeyPrefixQueryTxsByHashes = cacheKeyPrefixBridgeHistory + "queryTxsByHashes:"
cacheKeyPrefixL2ClaimableWithdrawalsByAddr = "l2ClaimableWithdrawalsByAddr:"
cacheKeyPrefixL2WithdrawalsByAddr = "l2WithdrawalsByAddr:"
cacheKeyPrefixTxsByAddr = "txsByAddr:"
cacheKeyPrefixQueryTxsByHashes = "queryTxsByHashes:"
cacheKeyExpiredTime = 1 * time.Minute
)
@@ -262,44 +255,38 @@ func (h *HistoryLogic) GetTxsByHashes(ctx context.Context, txHashes []string) ([
func getTxHistoryInfo(message *orm.CrossMessage) *types.TxHistoryInfo {
txHistory := &types.TxHistoryInfo{
MessageHash: message.MessageHash,
TokenType: orm.TokenType(message.TokenType),
TokenIDs: utils.ConvertStringToStringArray(message.TokenIDs),
TokenAmounts: utils.ConvertStringToStringArray(message.TokenAmounts),
L1TokenAddress: message.L1TokenAddress,
L2TokenAddress: message.L2TokenAddress,
MessageType: orm.MessageType(message.MessageType),
TxStatus: orm.TxStatusType(message.TxStatus),
MsgHash: message.MessageHash,
Amount: message.TokenAmounts,
L1Token: message.L1TokenAddress,
L2Token: message.L2TokenAddress,
IsL1: orm.MessageType(message.MessageType) == orm.MessageTypeL1SentMessage,
TxStatus: message.TxStatus,
BlockTimestamp: message.BlockTimestamp,
}
if txHistory.MessageType == orm.MessageTypeL1SentMessage {
if txHistory.IsL1 {
txHistory.Hash = message.L1TxHash
txHistory.ReplayTxHash = message.L1ReplayTxHash
txHistory.RefundTxHash = message.L1RefundTxHash
txHistory.BlockNumber = message.L1BlockNumber
txHistory.CounterpartChainTx = &types.CounterpartChainTx{
txHistory.FinalizeTx = &types.Finalized{
Hash: message.L2TxHash,
BlockNumber: message.L2BlockNumber,
}
} else {
txHistory.Hash = message.L2TxHash
txHistory.BlockNumber = message.L2BlockNumber
txHistory.CounterpartChainTx = &types.CounterpartChainTx{
txHistory.FinalizeTx = &types.Finalized{
Hash: message.L1TxHash,
BlockNumber: message.L1BlockNumber,
}
if orm.RollupStatusType(message.RollupStatus) == orm.RollupStatusTypeFinalized {
txHistory.ClaimInfo = &types.ClaimInfo{
From: message.MessageFrom,
To: message.MessageTo,
Value: message.MessageValue,
Nonce: strconv.FormatUint(message.MessageNonce, 10),
Message: message.MessageData,
Proof: types.L2MessageProof{
BatchIndex: strconv.FormatUint(message.BatchIndex, 10),
MerkleProof: "0x" + common.Bytes2Hex(message.MerkleProof),
},
Claimable: true,
txHistory.ClaimInfo = &types.UserClaimInfo{
From: message.MessageFrom,
To: message.MessageTo,
Value: message.MessageValue,
Nonce: strconv.FormatUint(message.MessageNonce, 10),
Message: message.MessageData,
Proof: "0x" + common.Bytes2Hex(message.MerkleProof),
BatchIndex: strconv.FormatUint(message.BatchIndex, 10),
Claimable: true,
}
}
}

View File

@@ -2,36 +2,27 @@ package logic
import (
"context"
"math/big"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/scroll-tech/go-ethereum/log"
backendabi "scroll-tech/bridge-history-api/abi"
"scroll-tech/bridge-history-api/internal/config"
"scroll-tech/bridge-history-api/internal/orm"
"scroll-tech/bridge-history-api/internal/utils"
)
// L1EventParser the l1 event parser
type L1EventParser struct {
cfg *config.FetcherConfig
client *ethclient.Client
}
// NewL1EventParser creates l1 event parser
func NewL1EventParser(cfg *config.FetcherConfig, client *ethclient.Client) *L1EventParser {
return &L1EventParser{
cfg: cfg,
client: client,
}
// NewL1EventParser create l1 event parser
func NewL1EventParser() *L1EventParser {
return &L1EventParser{}
}
// ParseL1CrossChainEventLogs parses L1 watched cross chain events.
func (e *L1EventParser) ParseL1CrossChainEventLogs(ctx context.Context, logs []types.Log, blockTimestampsMap map[uint64]uint64) ([]*orm.CrossMessage, []*orm.CrossMessage, error) {
func (e *L1EventParser) ParseL1CrossChainEventLogs(logs []types.Log, blockTimestampsMap map[uint64]uint64) ([]*orm.CrossMessage, []*orm.CrossMessage, error) {
var l1DepositMessages []*orm.CrossMessage
var l1RelayedMessages []*orm.CrossMessage
for _, vlog := range logs {
@@ -39,7 +30,7 @@ func (e *L1EventParser) ParseL1CrossChainEventLogs(ctx context.Context, logs []t
case backendabi.L1DepositETHSig:
event := backendabi.ETHMessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ETHGatewayABI, &event, "DepositETH", vlog); err != nil {
log.Error("Failed to unpack DepositETH event", "err", err)
log.Warn("Failed to unpack DepositETH event", "err", err)
return nil, nil, err
}
lastMessage := l1DepositMessages[len(l1DepositMessages)-1]
@@ -51,7 +42,7 @@ func (e *L1EventParser) ParseL1CrossChainEventLogs(ctx context.Context, logs []t
event := backendabi.ERC20MessageEvent{}
err := utils.UnpackLog(backendabi.IL1ERC20GatewayABI, &event, "DepositERC20", vlog)
if err != nil {
log.Error("Failed to unpack DepositERC20 event", "err", err)
log.Warn("Failed to unpack DepositERC20 event", "err", err)
return nil, nil, err
}
lastMessage := l1DepositMessages[len(l1DepositMessages)-1]
@@ -64,7 +55,7 @@ func (e *L1EventParser) ParseL1CrossChainEventLogs(ctx context.Context, logs []t
case backendabi.L1DepositERC721Sig:
event := backendabi.ERC721MessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ERC721GatewayABI, &event, "DepositERC721", vlog); err != nil {
log.Error("Failed to unpack DepositERC721 event", "err", err)
log.Warn("Failed to unpack DepositERC721 event", "err", err)
return nil, nil, err
}
lastMessage := l1DepositMessages[len(l1DepositMessages)-1]
@@ -77,7 +68,7 @@ func (e *L1EventParser) ParseL1CrossChainEventLogs(ctx context.Context, logs []t
case backendabi.L1BatchDepositERC721Sig:
event := backendabi.BatchERC721MessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ERC721GatewayABI, &event, "BatchDepositERC721", vlog); err != nil {
log.Error("Failed to unpack BatchDepositERC721 event", "err", err)
log.Warn("Failed to unpack BatchDepositERC721 event", "err", err)
return nil, nil, err
}
lastMessage := l1DepositMessages[len(l1DepositMessages)-1]
@@ -90,7 +81,7 @@ func (e *L1EventParser) ParseL1CrossChainEventLogs(ctx context.Context, logs []t
case backendabi.L1DepositERC1155Sig:
event := backendabi.ERC1155MessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ERC1155GatewayABI, &event, "DepositERC1155", vlog); err != nil {
log.Error("Failed to unpack DepositERC1155 event", "err", err)
log.Warn("Failed to unpack DepositERC1155 event", "err", err)
return nil, nil, err
}
lastMessage := l1DepositMessages[len(l1DepositMessages)-1]
@@ -104,7 +95,7 @@ func (e *L1EventParser) ParseL1CrossChainEventLogs(ctx context.Context, logs []t
case backendabi.L1BatchDepositERC1155Sig:
event := backendabi.BatchERC1155MessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ERC1155GatewayABI, &event, "BatchDepositERC1155", vlog); err != nil {
log.Error("Failed to unpack BatchDepositERC1155 event", "err", err)
log.Warn("Failed to unpack BatchDepositERC1155 event", "err", err)
return nil, nil, err
}
lastMessage := l1DepositMessages[len(l1DepositMessages)-1]
@@ -118,17 +109,12 @@ func (e *L1EventParser) ParseL1CrossChainEventLogs(ctx context.Context, logs []t
case backendabi.L1SentMessageEventSig:
event := backendabi.L1SentMessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ScrollMessengerABI, &event, "SentMessage", vlog); err != nil {
log.Error("Failed to unpack SentMessage event", "err", err)
return nil, nil, err
}
from, err := getRealFromAddress(ctx, event.Sender, event.Message, e.client, vlog.TxHash, e.cfg.GatewayRouterAddr)
if err != nil {
log.Error("Failed to get real 'from' address", "err", err)
log.Warn("Failed to unpack SentMessage event", "err", err)
return nil, nil, err
}
l1DepositMessages = append(l1DepositMessages, &orm.CrossMessage{
L1BlockNumber: vlog.BlockNumber,
Sender: from,
Sender: event.Sender.String(),
Receiver: event.Target.String(),
TokenType: int(orm.TokenTypeETH),
L1TxHash: vlog.TxHash.String(),
@@ -142,7 +128,7 @@ func (e *L1EventParser) ParseL1CrossChainEventLogs(ctx context.Context, logs []t
case backendabi.L1RelayedMessageEventSig:
event := backendabi.L1RelayedMessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ScrollMessengerABI, &event, "RelayedMessage", vlog); err != nil {
log.Error("Failed to unpack RelayedMessage event", "err", err)
log.Warn("Failed to unpack RelayedMessage event", "err", err)
return nil, nil, err
}
l1RelayedMessages = append(l1RelayedMessages, &orm.CrossMessage{
@@ -155,7 +141,7 @@ func (e *L1EventParser) ParseL1CrossChainEventLogs(ctx context.Context, logs []t
case backendabi.L1FailedRelayedMessageEventSig:
event := backendabi.L1FailedRelayedMessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ScrollMessengerABI, &event, "FailedRelayedMessage", vlog); err != nil {
log.Error("Failed to unpack FailedRelayedMessage event", "err", err)
log.Warn("Failed to unpack FailedRelayedMessage event", "err", err)
return nil, nil, err
}
l1RelayedMessages = append(l1RelayedMessages, &orm.CrossMessage{
@@ -178,17 +164,17 @@ func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.
case backendabi.L1CommitBatchEventSig:
event := backendabi.L1CommitBatchEvent{}
if err := utils.UnpackLog(backendabi.IScrollChainABI, &event, "CommitBatch", vlog); err != nil {
log.Error("Failed to unpack CommitBatch event", "err", err)
log.Warn("Failed to unpack CommitBatch event", "err", err)
return nil, err
}
commitTx, isPending, err := client.TransactionByHash(ctx, vlog.TxHash)
if err != nil || isPending {
log.Error("Failed to get commit batch tx or the tx is still pending", "err", err, "isPending", isPending)
log.Warn("Failed to get commit Batch tx receipt or the tx is still pending", "err", err)
return nil, err
}
startBlock, endBlock, err := utils.GetBatchRangeFromCalldata(commitTx.Data())
if err != nil {
log.Error("Failed to get batch range from calldata", "hash", commitTx.Hash().String(), "height", vlog.BlockNumber)
log.Warn("Failed to get batch range from calldata", "hash", commitTx.Hash().String(), "height", vlog.BlockNumber)
return nil, err
}
l1BatchEvents = append(l1BatchEvents, &orm.BatchEvent{
@@ -202,7 +188,7 @@ func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.
case backendabi.L1RevertBatchEventSig:
event := backendabi.L1RevertBatchEvent{}
if err := utils.UnpackLog(backendabi.IScrollChainABI, &event, "RevertBatch", vlog); err != nil {
log.Error("Failed to unpack RevertBatch event", "err", err)
log.Warn("Failed to unpack RevertBatch event", "err", err)
return nil, err
}
l1BatchEvents = append(l1BatchEvents, &orm.BatchEvent{
@@ -214,7 +200,7 @@ func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.
case backendabi.L1FinalizeBatchEventSig:
event := backendabi.L1FinalizeBatchEvent{}
if err := utils.UnpackLog(backendabi.IScrollChainABI, &event, "FinalizeBatch", vlog); err != nil {
log.Error("Failed to unpack FinalizeBatch event", "err", err)
log.Warn("Failed to unpack FinalizeBatch event", "err", err)
return nil, err
}
l1BatchEvents = append(l1BatchEvents, &orm.BatchEvent{
@@ -229,35 +215,27 @@ func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.
}
// ParseL1MessageQueueEventLogs parses L1 watched message queue events.
func (e *L1EventParser) ParseL1MessageQueueEventLogs(logs []types.Log, l1DepositMessages []*orm.CrossMessage) ([]*orm.MessageQueueEvent, error) {
messageHashes := make(map[common.Hash]struct{})
for _, msg := range l1DepositMessages {
messageHashes[common.HexToHash(msg.MessageHash)] = struct{}{}
}
func (e *L1EventParser) ParseL1MessageQueueEventLogs(logs []types.Log) ([]*orm.MessageQueueEvent, error) {
var l1MessageQueueEvents []*orm.MessageQueueEvent
for _, vlog := range logs {
switch vlog.Topics[0] {
case backendabi.L1QueueTransactionEventSig:
event := backendabi.L1QueueTransactionEvent{}
if err := utils.UnpackLog(backendabi.IL1MessageQueueABI, &event, "QueueTransaction", vlog); err != nil {
log.Error("Failed to unpack QueueTransaction event", "err", err)
log.Warn("Failed to unpack QueueTransaction event", "err", err)
return nil, err
}
messageHash := common.BytesToHash(crypto.Keccak256(event.Data))
// If the message hash is not found in the map, it's not a replayMessage or enforced tx (omitted); add it to the events.
if _, exists := messageHashes[messageHash]; !exists {
l1MessageQueueEvents = append(l1MessageQueueEvents, &orm.MessageQueueEvent{
EventType: orm.MessageQueueEventTypeQueueTransaction,
QueueIndex: event.QueueIndex,
MessageHash: messageHash,
TxHash: vlog.TxHash,
})
}
// 1. Update queue index of both sent message and replay message.
// 2. Update tx hash of replay message.
l1MessageQueueEvents = append(l1MessageQueueEvents, &orm.MessageQueueEvent{
EventType: orm.MessageQueueEventTypeQueueTransaction,
QueueIndex: event.QueueIndex,
TxHash: vlog.TxHash,
})
case backendabi.L1DequeueTransactionEventSig:
event := backendabi.L1DequeueTransactionEvent{}
if err := utils.UnpackLog(backendabi.IL1MessageQueueABI, &event, "DequeueTransaction", vlog); err != nil {
log.Error("Failed to unpack DequeueTransaction event", "err", err)
log.Warn("Failed to unpack DequeueTransaction event", "err", err)
return nil, err
}
skippedIndices := utils.GetSkippedQueueIndices(event.StartIndex.Uint64(), event.SkippedBitmap)
@@ -270,51 +248,14 @@ func (e *L1EventParser) ParseL1MessageQueueEventLogs(logs []types.Log, l1Deposit
case backendabi.L1DropTransactionEventSig:
event := backendabi.L1DropTransactionEvent{}
if err := utils.UnpackLog(backendabi.IL1MessageQueueABI, &event, "DropTransaction", vlog); err != nil {
log.Error("Failed to unpack DropTransaction event", "err", err)
log.Warn("Failed to unpack DropTransaction event", "err", err)
return nil, err
}
l1MessageQueueEvents = append(l1MessageQueueEvents, &orm.MessageQueueEvent{
EventType: orm.MessageQueueEventTypeDropTransaction,
QueueIndex: event.Index.Uint64(),
TxHash: vlog.TxHash,
})
}
}
return l1MessageQueueEvents, nil
}
func getRealFromAddress(ctx context.Context, eventSender common.Address, eventMessage []byte, client *ethclient.Client, txHash common.Hash, gatewayRouterAddr string) (string, error) {
if eventSender != common.HexToAddress(gatewayRouterAddr) {
return eventSender.String(), nil
}
// deposit/withdraw ETH: EOA -> contract 1 -> ... -> contract n -> gateway router -> messenger.
if len(eventMessage) >= 32 {
addressBytes := eventMessage[32-common.AddressLength : 32]
var address common.Address
address.SetBytes(addressBytes)
return address.Hex(), nil
}
log.Warn("event message data too short to contain an address", "length", len(eventMessage))
// Legacy handling logic if length of message < 32, for backward compatibility before the next contract upgrade.
tx, isPending, rpcErr := client.TransactionByHash(ctx, txHash)
if rpcErr != nil || isPending {
log.Error("Failed to get transaction or the transaction is still pending", "rpcErr", rpcErr, "isPending", isPending)
return "", rpcErr
}
// Case 1: deposit/withdraw ETH: EOA -> multisig -> gateway router -> messenger.
if tx.To() != nil && (*tx.To()).String() != gatewayRouterAddr {
return (*tx.To()).String(), nil
}
// Case 2: deposit/withdraw ETH: EOA -> gateway router -> messenger.
signer := types.LatestSignerForChainID(new(big.Int).SetUint64(tx.ChainId().Uint64()))
sender, err := signer.Sender(tx)
if err != nil {
log.Error("Get sender failed", "chain id", tx.ChainId().Uint64(), "tx hash", tx.Hash().String(), "err", err)
return "", err
}
return sender.String(), nil
}

View File

@@ -4,8 +4,6 @@ import (
"context"
"math/big"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/core/types"
@@ -19,35 +17,28 @@ import (
"scroll-tech/bridge-history-api/internal/utils"
)
// L1ReorgSafeDepth represents the number of block confirmations considered safe against L1 chain reorganizations.
// Reorganizations at this depth under normal cases are extremely unlikely.
const L1ReorgSafeDepth = 64
// L1FilterResult L1 fetcher result
// L1FilterResult l1 fetcher result
type L1FilterResult struct {
DepositMessages []*orm.CrossMessage
RelayedMessages []*orm.CrossMessage
BatchEvents []*orm.BatchEvent
MessageQueueEvents []*orm.MessageQueueEvent
RevertedTxs []*orm.CrossMessage
FailedGatewayRouterTxs []*orm.CrossMessage
DepositMessages []*orm.CrossMessage
RelayedMessages []*orm.CrossMessage
BatchEvents []*orm.BatchEvent
MessageQueueEvents []*orm.MessageQueueEvent
}
// L1FetcherLogic the L1 fetcher logic
// L1FetcherLogic the l1 fetcher's logic
type L1FetcherLogic struct {
cfg *config.FetcherConfig
cfg *config.LayerConfig
client *ethclient.Client
addressList []common.Address
gatewayList []common.Address
parser *L1EventParser
db *gorm.DB
crossMessageOrm *orm.CrossMessage
batchEventOrm *orm.BatchEvent
l1FetcherLogicFetchedTotal *prometheus.CounterVec
}
// NewL1FetcherLogic creates L1 fetcher logic
func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) *L1FetcherLogic {
// NewL1FetcherLogic create l1 fetcher logic
func NewL1FetcherLogic(cfg *config.LayerConfig, db *gorm.DB, client *ethclient.Client) *L1FetcherLogic {
addressList := []common.Address{
common.HexToAddress(cfg.ETHGatewayAddr),
@@ -66,94 +57,47 @@ func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
common.HexToAddress(cfg.MessageQueueAddr),
}
gatewayList := []common.Address{
common.HexToAddress(cfg.ETHGatewayAddr),
common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr),
common.HexToAddress(cfg.ERC1155GatewayAddr),
common.HexToAddress(cfg.MessengerAddr),
common.HexToAddress(cfg.GatewayRouterAddr),
}
// Optional erc20 gateways.
if common.HexToAddress(cfg.USDCGatewayAddr) != (common.Address{}) {
if cfg.USDCGatewayAddr != "" {
addressList = append(addressList, common.HexToAddress(cfg.USDCGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.USDCGatewayAddr))
}
if common.HexToAddress(cfg.LIDOGatewayAddr) != (common.Address{}) {
if cfg.LIDOGatewayAddr != "" {
addressList = append(addressList, common.HexToAddress(cfg.LIDOGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.LIDOGatewayAddr))
}
log.Info("L1 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)
f := &L1FetcherLogic{
return &L1FetcherLogic{
db: db,
crossMessageOrm: orm.NewCrossMessage(db),
batchEventOrm: orm.NewBatchEvent(db),
cfg: cfg,
client: client,
addressList: addressList,
gatewayList: gatewayList,
parser: NewL1EventParser(cfg, client),
parser: NewL1EventParser(),
}
reg := prometheus.DefaultRegisterer
f.l1FetcherLogicFetchedTotal = promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "L1_fetcher_logic_fetched_total",
Help: "The total number of events or failed txs fetched in L1 fetcher logic.",
}, []string{"type"})
return f
}
func (f *L1FetcherLogic) getBlocksAndDetectReorg(ctx context.Context, from, to uint64, lastBlockHash common.Hash) (bool, uint64, common.Hash, []*types.Block, error) {
blocks, err := utils.GetBlocksInRange(ctx, f.client, from, to)
func (f *L1FetcherLogic) gatewayRouterFailedTxs(ctx context.Context, from, to uint64) (map[uint64]uint64, []*orm.CrossMessage, error) {
blocks, err := utils.GetL1BlocksInRange(ctx, f.client, from, to)
if err != nil {
log.Error("failed to get L1 blocks in range", "from", from, "to", to, "err", err)
return false, 0, common.Hash{}, nil, err
return nil, nil, err
}
for _, block := range blocks {
if block.ParentHash() != lastBlockHash {
log.Warn("L1 reorg detected", "reorg height", block.NumberU64()-1, "expected hash", block.ParentHash().String(), "local hash", lastBlockHash.String())
var resyncHeight uint64
if block.NumberU64() > L1ReorgSafeDepth+1 {
resyncHeight = block.NumberU64() - L1ReorgSafeDepth - 1
}
header, err := f.client.HeaderByNumber(ctx, new(big.Int).SetUint64(resyncHeight))
if err != nil {
log.Error("failed to get L1 header by number", "block number", resyncHeight, "err", err)
return false, 0, common.Hash{}, nil, err
}
return true, resyncHeight, header.Hash(), nil, nil
}
lastBlockHash = block.Hash()
}
return false, 0, lastBlockHash, blocks, nil
}
func (f *L1FetcherLogic) getRevertedTxs(ctx context.Context, from, to uint64, blocks []*types.Block) (map[uint64]uint64, []*orm.CrossMessage, error) {
var l1RevertedTxs []*orm.CrossMessage
blockTimestampsMap := make(map[uint64]uint64)
var l1FailedGatewayRouterTxs []*orm.CrossMessage
for i := from; i <= to; i++ {
block := blocks[i-from]
blockTimestampsMap[block.NumberU64()] = block.Time()
for _, tx := range block.Transactions() {
// Gateways: L1 deposit.
// Messenger: L1 deposit retry (replayMessage), L1 deposit refund (dropMessage), L2 withdrawal's claim (relayMessageWithProof).
if !isTransactionToGateway(tx, f.gatewayList) {
txTo := tx.To()
if txTo == nil {
continue
}
toAddress := txTo.String()
if toAddress != f.cfg.GatewayRouterAddr {
continue
}
@@ -164,7 +108,7 @@ func (f *L1FetcherLogic) getRevertedTxs(ctx context.Context, from, to uint64, bl
return nil, nil, receiptErr
}
// Check if the transaction is failed
// Check if the transaction failed
if receipt.Status != types.ReceiptStatusFailed {
continue
}
@@ -176,18 +120,18 @@ func (f *L1FetcherLogic) getRevertedTxs(ctx context.Context, from, to uint64, bl
return nil, nil, senderErr
}
l1RevertedTxs = append(l1RevertedTxs, &orm.CrossMessage{
l1FailedGatewayRouterTxs = append(l1FailedGatewayRouterTxs, &orm.CrossMessage{
L1TxHash: tx.Hash().String(),
MessageType: int(orm.MessageTypeL1SentMessage),
Sender: sender.String(),
Receiver: (*tx.To()).String(),
L1BlockNumber: receipt.BlockNumber.Uint64(),
BlockTimestamp: block.Time(),
TxStatus: int(orm.TxStatusTypeSentTxReverted),
TxStatus: int(orm.TxStatusTypeSentFailed),
})
}
}
return blockTimestampsMap, l1RevertedTxs, nil
return blockTimestampsMap, l1FailedGatewayRouterTxs, nil
}
func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]types.Log, error) {
@@ -221,110 +165,46 @@ func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]
return eventLogs, nil
}
// L1Fetcher L1 fetcher
func (f *L1FetcherLogic) L1Fetcher(ctx context.Context, from, to uint64, lastBlockHash common.Hash) (bool, uint64, common.Hash, *L1FilterResult, error) {
// L1Fetcher l1 fetcher
func (f *L1FetcherLogic) L1Fetcher(ctx context.Context, from, to uint64) (*L1FilterResult, error) {
log.Info("fetch and save L1 events", "from", from, "to", to)
isReorg, reorgHeight, blockHash, blocks, getErr := f.getBlocksAndDetectReorg(ctx, from, to, lastBlockHash)
if getErr != nil {
log.Error("L1Fetcher getBlocksAndDetectReorg failed", "from", from, "to", to, "error", getErr)
return false, 0, common.Hash{}, nil, getErr
}
if isReorg {
return isReorg, reorgHeight, blockHash, nil, nil
}
blockTimestampsMap, l1RevertedTxs, err := f.getRevertedTxs(ctx, from, to, blocks)
blockTimestampsMap, l1FailedGatewayRouterTxs, err := f.gatewayRouterFailedTxs(ctx, from, to)
if err != nil {
log.Error("L1Fetcher getRevertedTxs failed", "from", from, "to", to, "error", err)
return false, 0, common.Hash{}, nil, err
log.Error("L1Fetcher gatewayRouterFailedTxs failed", "from", from, "to", to, "error", err)
return nil, err
}
eventLogs, err := f.l1FetcherLogs(ctx, from, to)
if err != nil {
log.Error("L1Fetcher l1FetcherLogs failed", "from", from, "to", to, "error", err)
return false, 0, common.Hash{}, nil, err
return nil, err
}
l1DepositMessages, l1RelayedMessages, err := f.parser.ParseL1CrossChainEventLogs(ctx, eventLogs, blockTimestampsMap)
l1DepositMessages, l1RelayedMessages, err := f.parser.ParseL1CrossChainEventLogs(eventLogs, blockTimestampsMap)
if err != nil {
log.Error("failed to parse L1 cross chain event logs", "from", from, "to", to, "err", err)
return false, 0, common.Hash{}, nil, err
return nil, err
}
l1BatchEvents, err := f.parser.ParseL1BatchEventLogs(ctx, eventLogs, f.client)
if err != nil {
log.Error("failed to parse L1 batch event logs", "from", from, "to", to, "err", err)
return false, 0, common.Hash{}, nil, err
return nil, err
}
l1MessageQueueEvents, err := f.parser.ParseL1MessageQueueEventLogs(eventLogs, l1DepositMessages)
l1MessageQueueEvents, err := f.parser.ParseL1MessageQueueEventLogs(eventLogs)
if err != nil {
log.Error("failed to parse L1 message queue event logs", "from", from, "to", to, "err", err)
return false, 0, common.Hash{}, nil, err
return nil, err
}
res := L1FilterResult{
DepositMessages: l1DepositMessages,
RelayedMessages: l1RelayedMessages,
BatchEvents: l1BatchEvents,
MessageQueueEvents: l1MessageQueueEvents,
RevertedTxs: l1RevertedTxs,
}
f.updateMetrics(res)
return false, 0, blockHash, &res, nil
}
func (f *L1FetcherLogic) updateMetrics(res L1FilterResult) {
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_failed_gateway_router_transaction").Add(float64(len(res.RevertedTxs)))
for _, depositMessage := range res.DepositMessages {
switch orm.TokenType(depositMessage.TokenType) {
case orm.TokenTypeETH:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_deposit_eth").Add(1)
case orm.TokenTypeERC20:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_deposit_erc20").Add(1)
case orm.TokenTypeERC721:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_deposit_erc721").Add(1)
case orm.TokenTypeERC1155:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_deposit_erc1155").Add(1)
}
}
for _, relayedMessage := range res.RelayedMessages {
switch orm.TxStatusType(relayedMessage.TxStatus) {
case orm.TxStatusTypeRelayed:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_relayed_message").Add(1)
case orm.TxStatusTypeFailedRelayed:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_failed_relayed_message").Add(1)
}
// Have not tracked L1 relayed message reverted transaction yet.
// 1. need to parse calldata of tx.
// 2. hard to track internal tx.
}
for _, batchEvent := range res.BatchEvents {
switch orm.BatchStatusType(batchEvent.BatchStatus) {
case orm.BatchStatusTypeCommitted:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_commit_batch_event").Add(1)
case orm.BatchStatusTypeReverted:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_revert_batch_event").Add(1)
case orm.BatchStatusTypeFinalized:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_finalize_batch_event").Add(1)
}
}
for _, messageQueueEvent := range res.MessageQueueEvents {
switch messageQueueEvent.EventType {
case orm.MessageQueueEventTypeQueueTransaction: // sendMessage is filtered out, only leaving replayMessage or appendEnforcedTransaction.
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_replay_message_or_enforced_transaction").Add(1)
case orm.MessageQueueEventTypeDequeueTransaction:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_skip_message").Add(1)
case orm.MessageQueueEventTypeDropTransaction:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_drop_message").Add(1)
}
FailedGatewayRouterTxs: l1FailedGatewayRouterTxs,
DepositMessages: l1DepositMessages,
RelayedMessages: l1RelayedMessages,
BatchEvents: l1BatchEvents,
MessageQueueEvents: l1MessageQueueEvents,
}
return &res, nil
}

View File

@@ -1,35 +1,26 @@
package logic
import (
"context"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/scroll-tech/go-ethereum/log"
backendabi "scroll-tech/bridge-history-api/abi"
"scroll-tech/bridge-history-api/internal/config"
"scroll-tech/bridge-history-api/internal/orm"
"scroll-tech/bridge-history-api/internal/utils"
)
// L2EventParser the L2 event parser
type L2EventParser struct {
cfg *config.FetcherConfig
client *ethclient.Client
}
// NewL2EventParser creates the L2 event parser
func NewL2EventParser(cfg *config.FetcherConfig, client *ethclient.Client) *L2EventParser {
return &L2EventParser{
cfg: cfg,
client: client,
}
// NewL2EventParser create the L2 event parser
func NewL2EventParser() *L2EventParser {
return &L2EventParser{}
}
// ParseL2EventLogs parses L2 watched events
func (e *L2EventParser) ParseL2EventLogs(ctx context.Context, logs []types.Log, blockTimestampsMap map[uint64]uint64) ([]*orm.CrossMessage, []*orm.CrossMessage, error) {
func (e *L2EventParser) ParseL2EventLogs(logs []types.Log, blockTimestampsMap map[uint64]uint64) ([]*orm.CrossMessage, []*orm.CrossMessage, error) {
var l2WithdrawMessages []*orm.CrossMessage
var l2RelayedMessages []*orm.CrossMessage
for _, vlog := range logs {
@@ -38,7 +29,7 @@ func (e *L2EventParser) ParseL2EventLogs(ctx context.Context, logs []types.Log,
event := backendabi.ETHMessageEvent{}
err := utils.UnpackLog(backendabi.IL2ETHGatewayABI, &event, "WithdrawETH", vlog)
if err != nil {
log.Error("Failed to unpack WithdrawETH event", "err", err)
log.Warn("Failed to unpack WithdrawETH event", "err", err)
return nil, nil, err
}
lastMessage := l2WithdrawMessages[len(l2WithdrawMessages)-1]
@@ -50,7 +41,7 @@ func (e *L2EventParser) ParseL2EventLogs(ctx context.Context, logs []types.Log,
event := backendabi.ERC20MessageEvent{}
err := utils.UnpackLog(backendabi.IL2ERC20GatewayABI, &event, "WithdrawERC20", vlog)
if err != nil {
log.Error("Failed to unpack WithdrawERC20 event", "err", err)
log.Warn("Failed to unpack WithdrawERC20 event", "err", err)
return nil, nil, err
}
lastMessage := l2WithdrawMessages[len(l2WithdrawMessages)-1]
@@ -64,7 +55,7 @@ func (e *L2EventParser) ParseL2EventLogs(ctx context.Context, logs []types.Log,
event := backendabi.ERC721MessageEvent{}
err := utils.UnpackLog(backendabi.IL2ERC721GatewayABI, &event, "WithdrawERC721", vlog)
if err != nil {
log.Error("Failed to unpack WithdrawERC721 event", "err", err)
log.Warn("Failed to unpack WithdrawERC721 event", "err", err)
return nil, nil, err
}
lastMessage := l2WithdrawMessages[len(l2WithdrawMessages)-1]
@@ -78,7 +69,7 @@ func (e *L2EventParser) ParseL2EventLogs(ctx context.Context, logs []types.Log,
event := backendabi.BatchERC721MessageEvent{}
err := utils.UnpackLog(backendabi.IL2ERC721GatewayABI, &event, "BatchWithdrawERC721", vlog)
if err != nil {
log.Error("Failed to unpack BatchWithdrawERC721 event", "err", err)
log.Warn("Failed to unpack BatchWithdrawERC721 event", "err", err)
return nil, nil, err
}
lastMessage := l2WithdrawMessages[len(l2WithdrawMessages)-1]
@@ -92,7 +83,7 @@ func (e *L2EventParser) ParseL2EventLogs(ctx context.Context, logs []types.Log,
event := backendabi.ERC1155MessageEvent{}
err := utils.UnpackLog(backendabi.IL2ERC1155GatewayABI, &event, "WithdrawERC1155", vlog)
if err != nil {
log.Error("Failed to unpack WithdrawERC1155 event", "err", err)
log.Warn("Failed to unpack WithdrawERC1155 event", "err", err)
return nil, nil, err
}
lastMessage := l2WithdrawMessages[len(l2WithdrawMessages)-1]
@@ -107,7 +98,7 @@ func (e *L2EventParser) ParseL2EventLogs(ctx context.Context, logs []types.Log,
event := backendabi.BatchERC1155MessageEvent{}
err := utils.UnpackLog(backendabi.IL2ERC1155GatewayABI, &event, "BatchWithdrawERC1155", vlog)
if err != nil {
log.Error("Failed to unpack BatchWithdrawERC1155 event", "err", err)
log.Warn("Failed to unpack BatchWithdrawERC1155 event", "err", err)
return nil, nil, err
}
lastMessage := l2WithdrawMessages[len(l2WithdrawMessages)-1]
@@ -122,17 +113,12 @@ func (e *L2EventParser) ParseL2EventLogs(ctx context.Context, logs []types.Log,
event := backendabi.L2SentMessageEvent{}
err := utils.UnpackLog(backendabi.IL2ScrollMessengerABI, &event, "SentMessage", vlog)
if err != nil {
log.Error("Failed to unpack SentMessage event", "err", err)
return nil, nil, err
}
from, err := getRealFromAddress(ctx, event.Sender, event.Message, e.client, vlog.TxHash, e.cfg.GatewayRouterAddr)
if err != nil {
log.Error("Failed to get real 'from' address", "err", err)
log.Warn("Failed to unpack SentMessage event", "err", err)
return nil, nil, err
}
l2WithdrawMessages = append(l2WithdrawMessages, &orm.CrossMessage{
MessageHash: utils.ComputeMessageHash(event.Sender, event.Target, event.Value, event.MessageNonce, event.Message).String(),
Sender: from,
Sender: event.Sender.String(),
Receiver: event.Target.String(),
TokenType: int(orm.TokenTypeETH),
L2TxHash: vlog.TxHash.String(),
@@ -151,7 +137,7 @@ func (e *L2EventParser) ParseL2EventLogs(ctx context.Context, logs []types.Log,
event := backendabi.L2RelayedMessageEvent{}
err := utils.UnpackLog(backendabi.IL2ScrollMessengerABI, &event, "RelayedMessage", vlog)
if err != nil {
log.Error("Failed to unpack RelayedMessage event", "err", err)
log.Warn("Failed to unpack RelayedMessage event", "err", err)
return nil, nil, err
}
l2RelayedMessages = append(l2RelayedMessages, &orm.CrossMessage{
@@ -165,7 +151,7 @@ func (e *L2EventParser) ParseL2EventLogs(ctx context.Context, logs []types.Log,
event := backendabi.L2RelayedMessageEvent{}
err := utils.UnpackLog(backendabi.IL2ScrollMessengerABI, &event, "FailedRelayedMessage", vlog)
if err != nil {
log.Error("Failed to unpack FailedRelayedMessage event", "err", err)
log.Warn("Failed to unpack FailedRelayedMessage event", "err", err)
return nil, nil, err
}
l2RelayedMessages = append(l2RelayedMessages, &orm.CrossMessage{

View File

@@ -4,8 +4,6 @@ import (
"context"
"math/big"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/core/types"
@@ -20,33 +18,27 @@ import (
"scroll-tech/bridge-history-api/internal/utils"
)
// L2ReorgSafeDepth represents the number of block confirmations considered safe against L2 chain reorganizations.
// Reorganizations at this depth under normal cases are extremely unlikely.
const L2ReorgSafeDepth = 256
// L2FilterResult the L2 filter result
type L2FilterResult struct {
WithdrawMessages []*orm.CrossMessage
RelayedMessages []*orm.CrossMessage // relayed, failed relayed, relay tx reverted.
OtherRevertedTxs []*orm.CrossMessage // reverted txs except relay tx reverted.
FailedGatewayRouterTxs []*orm.CrossMessage
RevertedRelayedMessages []*orm.CrossMessage
WithdrawMessages []*orm.CrossMessage
RelayedMessages []*orm.CrossMessage
}
// L2FetcherLogic the L2 fetcher logic
type L2FetcherLogic struct {
cfg *config.FetcherConfig
cfg *config.LayerConfig
client *ethclient.Client
addressList []common.Address
gatewayList []common.Address
parser *L2EventParser
db *gorm.DB
crossMessageOrm *orm.CrossMessage
batchEventOrm *orm.BatchEvent
l2FetcherLogicFetchedTotal *prometheus.CounterVec
}
// NewL2FetcherLogic create L2 fetcher logic
func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) *L2FetcherLogic {
func NewL2FetcherLogic(cfg *config.LayerConfig, db *gorm.DB, client *ethclient.Client) *L2FetcherLogic {
addressList := []common.Address{
common.HexToAddress(cfg.ETHGatewayAddr),
@@ -61,145 +53,97 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
common.HexToAddress(cfg.MessengerAddr),
}
gatewayList := []common.Address{
common.HexToAddress(cfg.ETHGatewayAddr),
common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr),
common.HexToAddress(cfg.ERC1155GatewayAddr),
common.HexToAddress(cfg.MessengerAddr),
common.HexToAddress(cfg.GatewayRouterAddr),
}
// Optional erc20 gateways.
if common.HexToAddress(cfg.USDCGatewayAddr) != (common.Address{}) {
if cfg.USDCGatewayAddr != "" {
addressList = append(addressList, common.HexToAddress(cfg.USDCGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.USDCGatewayAddr))
}
if common.HexToAddress(cfg.LIDOGatewayAddr) != (common.Address{}) {
if cfg.LIDOGatewayAddr != "" {
addressList = append(addressList, common.HexToAddress(cfg.LIDOGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.USDCGatewayAddr))
}
log.Info("L2 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)
f := &L2FetcherLogic{
return &L2FetcherLogic{
db: db,
crossMessageOrm: orm.NewCrossMessage(db),
batchEventOrm: orm.NewBatchEvent(db),
cfg: cfg,
client: client,
addressList: addressList,
gatewayList: gatewayList,
parser: NewL2EventParser(cfg, client),
parser: NewL2EventParser(),
}
reg := prometheus.DefaultRegisterer
f.l2FetcherLogicFetchedTotal = promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "L2_fetcher_logic_fetched_total",
Help: "The total number of events or failed txs fetched in L2 fetcher logic.",
}, []string{"type"})
return f
}
func (f *L2FetcherLogic) getBlocksAndDetectReorg(ctx context.Context, from, to uint64, lastBlockHash common.Hash) (bool, uint64, common.Hash, []*types.Block, error) {
blocks, err := utils.GetBlocksInRange(ctx, f.client, from, to)
func (f *L2FetcherLogic) gatewayRouterFailedTxs(ctx context.Context, from, to uint64) (map[uint64]uint64, []*orm.CrossMessage, []*orm.CrossMessage, error) {
var l2FailedGatewayRouterTxs []*orm.CrossMessage
var l2RevertedRelayedMessages []*orm.CrossMessage
blockTimestampsMap := make(map[uint64]uint64)
blocks, err := utils.GetL2BlocksInRange(ctx, f.client, from, to)
if err != nil {
log.Error("failed to get L2 blocks in range", "from", from, "to", to, "err", err)
return false, 0, common.Hash{}, nil, err
return nil, nil, nil, err
}
for _, block := range blocks {
if block.ParentHash() != lastBlockHash {
log.Warn("L2 reorg detected", "reorg height", block.NumberU64()-1, "expected hash", block.ParentHash().String(), "local hash", lastBlockHash.String())
var resyncHeight uint64
if block.NumberU64() > L2ReorgSafeDepth+1 {
resyncHeight = block.NumberU64() - L2ReorgSafeDepth - 1
}
header, err := f.client.HeaderByNumber(ctx, new(big.Int).SetUint64(resyncHeight))
if err != nil {
log.Error("failed to get L2 header by number", "block number", resyncHeight, "err", err)
return false, 0, common.Hash{}, nil, err
}
return true, resyncHeight, header.Hash(), nil, nil
}
lastBlockHash = block.Hash()
}
return false, 0, lastBlockHash, blocks, nil
}
func (f *L2FetcherLogic) getRevertedTxs(ctx context.Context, from, to uint64, blocks []*types.Block) (map[uint64]uint64, []*orm.CrossMessage, []*orm.CrossMessage, error) {
var l2RevertedUserTxs []*orm.CrossMessage
var l2RevertedRelayedMessageTxs []*orm.CrossMessage
blockTimestampsMap := make(map[uint64]uint64)
for i := from; i <= to; i++ {
block := blocks[i-from]
blockTimestampsMap[block.NumberU64()] = block.Time()
for _, tx := range block.Transactions() {
if tx.IsL1MessageTx() {
txTo := tx.To()
if txTo == nil {
continue
}
toAddress := txTo.String()
if toAddress == f.cfg.GatewayRouterAddr {
receipt, receiptErr := f.client.TransactionReceipt(ctx, tx.Hash())
if receiptErr != nil {
log.Error("Failed to get transaction receipt", "txHash", tx.Hash().String(), "err", receiptErr)
return nil, nil, nil, receiptErr
}
// Check if the transaction is failed
// Check if the transaction failed
if receipt.Status == types.ReceiptStatusFailed {
l2RevertedRelayedMessageTxs = append(l2RevertedRelayedMessageTxs, &orm.CrossMessage{
MessageHash: common.BytesToHash(crypto.Keccak256(tx.AsL1MessageTx().Data)).String(),
signer := types.LatestSignerForChainID(new(big.Int).SetUint64(tx.ChainId().Uint64()))
sender, signerErr := signer.Sender(tx)
if signerErr != nil {
log.Error("get sender failed", "chain id", tx.ChainId().Uint64(), "tx hash", tx.Hash().String(), "err", signerErr)
return nil, nil, nil, signerErr
}
l2FailedGatewayRouterTxs = append(l2FailedGatewayRouterTxs, &orm.CrossMessage{
L2TxHash: tx.Hash().String(),
MessageType: int(orm.MessageTypeL2SentMessage),
Sender: sender.String(),
Receiver: (*tx.To()).String(),
L2BlockNumber: receipt.BlockNumber.Uint64(),
BlockTimestamp: block.Time(),
TxStatus: int(orm.TxStatusTypeSentFailed),
})
}
}
if tx.Type() == types.L1MessageTxType {
receipt, receiptErr := f.client.TransactionReceipt(ctx, tx.Hash())
if receiptErr != nil {
log.Error("Failed to get transaction receipt", "txHash", tx.Hash().String(), "err", receiptErr)
return nil, nil, nil, receiptErr
}
// Check if the transaction failed
if receipt.Status == types.ReceiptStatusFailed {
l2RevertedRelayedMessages = append(l2RevertedRelayedMessages, &orm.CrossMessage{
MessageHash: "0x" + common.Bytes2Hex(crypto.Keccak256(tx.AsL1MessageTx().Data)),
L2TxHash: tx.Hash().String(),
TxStatus: int(orm.TxStatusTypeRelayTxReverted),
TxStatus: int(orm.TxStatusTypeRelayedTxReverted),
L2BlockNumber: receipt.BlockNumber.Uint64(),
MessageType: int(orm.MessageTypeL1SentMessage),
})
}
continue
}
// Gateways: L2 withdrawal.
if !isTransactionToGateway(tx, f.gatewayList) {
continue
}
receipt, receiptErr := f.client.TransactionReceipt(ctx, tx.Hash())
if receiptErr != nil {
log.Error("Failed to get transaction receipt", "txHash", tx.Hash().String(), "err", receiptErr)
return nil, nil, nil, receiptErr
}
// Check if the transaction is failed
if receipt.Status == types.ReceiptStatusFailed {
signer := types.LatestSignerForChainID(new(big.Int).SetUint64(tx.ChainId().Uint64()))
sender, signerErr := signer.Sender(tx)
if signerErr != nil {
log.Error("get sender failed", "chain id", tx.ChainId().Uint64(), "tx hash", tx.Hash().String(), "err", signerErr)
return nil, nil, nil, signerErr
}
l2RevertedUserTxs = append(l2RevertedUserTxs, &orm.CrossMessage{
L2TxHash: tx.Hash().String(),
MessageType: int(orm.MessageTypeL2SentMessage),
Sender: sender.String(),
Receiver: (*tx.To()).String(),
L2BlockNumber: receipt.BlockNumber.Uint64(),
BlockTimestamp: block.Time(),
TxStatus: int(orm.TxStatusTypeSentTxReverted),
})
}
}
}
return blockTimestampsMap, l2RevertedUserTxs, l2RevertedRelayedMessageTxs, nil
return blockTimestampsMap, l2FailedGatewayRouterTxs, l2RevertedRelayedMessages, nil
}
func (f *L2FetcherLogic) l2FetcherLogs(ctx context.Context, from, to uint64) ([]types.Log, error) {
@@ -227,84 +171,32 @@ func (f *L2FetcherLogic) l2FetcherLogs(ctx context.Context, from, to uint64) ([]
}
// L2Fetcher L2 fetcher
func (f *L2FetcherLogic) L2Fetcher(ctx context.Context, from, to uint64, lastBlockHash common.Hash) (bool, uint64, common.Hash, *L2FilterResult, error) {
log.Info("fetch and save L2 events", "from", from, "to", to)
func (f *L2FetcherLogic) L2Fetcher(ctx context.Context, from, to uint64) (*L2FilterResult, error) {
log.Info("fetch and save L1 events", "from", from, "to", to)
isReorg, reorgHeight, blockHash, blocks, getErr := f.getBlocksAndDetectReorg(ctx, from, to, lastBlockHash)
if getErr != nil {
log.Error("L2Fetcher getBlocksAndDetectReorg failed", "from", from, "to", to, "error", getErr)
return false, 0, common.Hash{}, nil, getErr
}
if isReorg {
return isReorg, reorgHeight, blockHash, nil, nil
}
blockTimestampsMap, revertedUserTxs, revertedRelayMsgs, routerErr := f.getRevertedTxs(ctx, from, to, blocks)
blockTimestampsMap, l2FailedGatewayRouterTxs, l2RevertedRelayedMessages, routerErr := f.gatewayRouterFailedTxs(ctx, from, to)
if routerErr != nil {
log.Error("L2Fetcher getRevertedTxs failed", "from", from, "to", to, "error", routerErr)
return false, 0, common.Hash{}, nil, routerErr
log.Error("L2Fetcher gatewayRouterFailedTxs failed", "from", from, "to", to, "error", routerErr)
return nil, routerErr
}
eventLogs, err := f.l2FetcherLogs(ctx, from, to)
if err != nil {
log.Error("L2Fetcher l2FetcherLogs failed", "from", from, "to", to, "error", err)
return false, 0, common.Hash{}, nil, err
return nil, err
}
l2WithdrawMessages, l2RelayedMessages, err := f.parser.ParseL2EventLogs(ctx, eventLogs, blockTimestampsMap)
l2WithdrawMessages, l2RelayedMessages, err := f.parser.ParseL2EventLogs(eventLogs, blockTimestampsMap)
if err != nil {
log.Error("failed to parse L2 event logs", "from", from, "to", to, "err", err)
return false, 0, common.Hash{}, nil, err
return nil, err
}
res := L2FilterResult{
WithdrawMessages: l2WithdrawMessages,
RelayedMessages: append(l2RelayedMessages, revertedRelayMsgs...),
OtherRevertedTxs: revertedUserTxs,
FailedGatewayRouterTxs: l2FailedGatewayRouterTxs,
RevertedRelayedMessages: l2RevertedRelayedMessages,
WithdrawMessages: l2WithdrawMessages,
RelayedMessages: l2RelayedMessages,
}
f.updateMetrics(res)
return false, 0, blockHash, &res, nil
}
func (f *L2FetcherLogic) updateMetrics(res L2FilterResult) {
f.l2FetcherLogicFetchedTotal.WithLabelValues("L2_failed_gateway_router_transaction").Add(float64(len(res.OtherRevertedTxs)))
for _, withdrawMessage := range res.WithdrawMessages {
switch orm.TokenType(withdrawMessage.TokenType) {
case orm.TokenTypeETH:
f.l2FetcherLogicFetchedTotal.WithLabelValues("L2_withdraw_eth").Add(1)
case orm.TokenTypeERC20:
f.l2FetcherLogicFetchedTotal.WithLabelValues("L2_withdraw_erc20").Add(1)
case orm.TokenTypeERC721:
f.l2FetcherLogicFetchedTotal.WithLabelValues("L2_withdraw_erc721").Add(1)
case orm.TokenTypeERC1155:
f.l2FetcherLogicFetchedTotal.WithLabelValues("L2_withdraw_erc1155").Add(1)
}
}
for _, relayedMessage := range res.RelayedMessages {
switch orm.TxStatusType(relayedMessage.TxStatus) {
case orm.TxStatusTypeRelayed:
f.l2FetcherLogicFetchedTotal.WithLabelValues("L2_relayed_message").Add(1)
case orm.TxStatusTypeFailedRelayed:
f.l2FetcherLogicFetchedTotal.WithLabelValues("L2_failed_relayed_message").Add(1)
case orm.TxStatusTypeRelayTxReverted:
f.l2FetcherLogicFetchedTotal.WithLabelValues("L2_reverted_relayed_message_transaction").Add(1)
}
}
}
func isTransactionToGateway(tx *types.Transaction, gatewayList []common.Address) bool {
if tx.To() == nil {
return false
}
for _, gateway := range gatewayList {
if *tx.To() == gateway {
return true
}
}
return false
return &res, nil
}

View File

@@ -6,7 +6,6 @@ import (
"time"
"gorm.io/gorm"
"gorm.io/gorm/clause"
)
// BatchStatusType represents the type of batch status.
@@ -48,7 +47,7 @@ type BatchEvent struct {
// TableName returns the table name for the BatchEvent model.
func (*BatchEvent) TableName() string {
return "batch_event_v2"
return "batch_event"
}
// NewBatchEvent returns a new instance of BatchEvent.
@@ -56,7 +55,7 @@ func NewBatchEvent(db *gorm.DB) *BatchEvent {
return &BatchEvent{db: db}
}
// GetBatchEventSyncedHeightInDB returns the maximum l1_block_number from the batch_event_v2 table.
// GetBatchEventSyncedHeightInDB returns the maximum l1_block_number from the batch_event table.
func (c *BatchEvent) GetBatchEventSyncedHeightInDB(ctx context.Context) (uint64, error) {
var batch BatchEvent
db := c.db.WithContext(ctx)
@@ -71,13 +70,12 @@ func (c *BatchEvent) GetBatchEventSyncedHeightInDB(ctx context.Context) (uint64,
return batch.L1BlockNumber, nil
}
// GetFinalizedBatchesLEBlockHeight returns the finalized batches with end block <= given block height in db.
func (c *BatchEvent) GetFinalizedBatchesLEBlockHeight(ctx context.Context, blockHeight uint64) ([]*BatchEvent, error) {
// GetBatchesLEBlockHeight returns the batches with end block <= given block height in db.
func (c *BatchEvent) GetBatchesLEBlockHeight(ctx context.Context, blockHeight uint64) ([]*BatchEvent, error) {
var batches []*BatchEvent
db := c.db.WithContext(ctx)
db = db.Model(&BatchEvent{})
db = db.Where("end_block_number <= ?", blockHeight)
db = db.Where("batch_status = ?", BatchStatusTypeFinalized)
db = db.Where("update_status = ?", UpdateStatusTypeUnupdated)
db = db.Order("batch_index asc")
if err := db.Find(&batches).Error; err != nil {
@@ -90,21 +88,19 @@ func (c *BatchEvent) GetFinalizedBatchesLEBlockHeight(ctx context.Context, block
}
// InsertOrUpdateBatchEvents inserts a new batch event or updates an existing one based on the BatchStatusType.
func (c *BatchEvent) InsertOrUpdateBatchEvents(ctx context.Context, l1BatchEvents []*BatchEvent) error {
func (c *BatchEvent) InsertOrUpdateBatchEvents(ctx context.Context, l1BatchEvents []*BatchEvent, dbTX ...*gorm.DB) error {
for _, l1BatchEvent := range l1BatchEvents {
db := c.db
if len(dbTX) > 0 && dbTX[0] != nil {
db = dbTX[0]
}
db = db.WithContext(ctx)
db = db.Model(&BatchEvent{})
updateFields := make(map[string]interface{})
switch BatchStatusType(l1BatchEvent.BatchStatus) {
case BatchStatusTypeCommitted:
// Use the clause to either insert or ignore on conflict
db = db.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "batch_hash"}},
DoNothing: true,
})
if err := db.Create(l1BatchEvent).Error; err != nil {
return fmt.Errorf("failed to insert or ignore batch event, error: %w", err)
return fmt.Errorf("failed to insert batch event, error: %w", err)
}
case BatchStatusTypeFinalized:
db = db.Where("batch_index = ?", l1BatchEvent.BatchIndex)

View File

@@ -5,6 +5,7 @@ import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/scroll-tech/go-ethereum/common"
"gorm.io/gorm"
"gorm.io/gorm/clause"
@@ -37,21 +38,22 @@ type TxStatusType int
// Constants for TxStatusType.
const (
// TxStatusTypeSent is one of the initial statuses for cross-chain messages.
// TxStatusTypeSent is one of the initial statuses for cross-chain messages (the other one is TxStatusTypeSentFailed).
// It is used as the default value to prevent overwriting the transaction status in scenarios where the message status might change
// from a later status (e.g., relayed) back to "sent".
// Example flow (L1 -> L2 message, and L1 fetcher is slower than L2 fetcher):
// 1. The relayed message is first tracked and processed, setting tx_status to TxStatusTypeRelayed.
// 2. The sent message is later processed (same cross-chain message), the tx_status should not over-write TxStatusTypeRelayed.
TxStatusTypeSent TxStatusType = iota
TxStatusTypeSentTxReverted // Not track message hash, thus will not be processed again anymore.
TxStatusTypeRelayed // Terminal status.
// Retry: this often occurs due to an out of gas (OOG) issue if the transaction was initiated via the frontend.
// Example flow:
// 1. A relayed message is processed, setting tx_status to TxStatusTypeRelayed.
// 2. If a sent message is later processed for the same cross-chain message, the tx_status
// should remain as TxStatusTypeRelayed and not be modified back to TxStatusTypeSent.
TxStatusTypeSent TxStatusType = iota
TxStatusTypeSentFailed
TxStatusTypeRelayed
// FailedRelayedMessage event: encoded tx failed, cannot retry. e.g., https://sepolia.scrollscan.com/tx/0xfc7d3ea5ec8dc9b664a5a886c3b33d21e665355057601033481a439498efb79a
TxStatusTypeFailedRelayed
// Retry: this often occurs due to an out of gas (OOG) issue if the transaction was initiated via the frontend.
TxStatusTypeRelayTxReverted
// In some cases, user can retry with a larger gas limit. e.g., https://sepolia.scrollscan.com/tx/0x7323a7ba29492cb47d92206411be99b27896f2823cee0633a596b646b73f1b5b
TxStatusTypeRelayedTxReverted
TxStatusTypeSkipped
TxStatusTypeDropped // Terminal status.
TxStatusTypeDropped
)
// RollupStatusType represents the status of a rollup.
@@ -78,12 +80,7 @@ const (
type MessageQueueEvent struct {
EventType MessageQueueEventType
QueueIndex uint64
// Track replay tx hash and refund tx hash.
TxHash common.Hash
// QueueTransaction only in replayMessage, to track which message is replayed.
MessageHash common.Hash
TxHash common.Hash
}
// CrossMessage represents a cross message.
@@ -98,10 +95,8 @@ type CrossMessage struct {
Sender string `json:"sender" gorm:"column:sender"`
Receiver string `json:"receiver" gorm:"column:receiver"`
MessageHash string `json:"message_hash" gorm:"column:message_hash"`
L1TxHash string `json:"l1_tx_hash" gorm:"column:l1_tx_hash"` // initial tx hash, if MessageType is MessageTypeL1SentMessage.
L1ReplayTxHash string `json:"l1_replay_tx_hash" gorm:"column:l1_replay_tx_hash"`
L1RefundTxHash string `json:"l1_refund_tx_hash" gorm:"column:l1_refund_tx_hash"`
L2TxHash string `json:"l2_tx_hash" gorm:"column:l2_tx_hash"` // initial tx hash, if MessageType is MessageTypeL2SentMessage.
L1TxHash string `json:"l1_tx_hash" gorm:"column:l1_tx_hash"`
L2TxHash string `json:"l2_tx_hash" gorm:"column:l2_tx_hash"`
L1BlockNumber uint64 `json:"l1_block_number" gorm:"column:l1_block_number"`
L2BlockNumber uint64 `json:"l2_block_number" gorm:"column:l2_block_number"`
L1TokenAddress string `json:"l1_token_address" gorm:"column:l1_token_address"`
@@ -123,7 +118,7 @@ type CrossMessage struct {
// TableName returns the table name for the CrossMessage model.
func (*CrossMessage) TableName() string {
return "cross_message_v2"
return "cross_message"
}
// NewCrossMessage returns a new instance of CrossMessage.
@@ -159,42 +154,23 @@ func (c *CrossMessage) GetMessageSyncedHeightInDB(ctx context.Context, messageTy
}
}
// GetL2LatestFinalizedWithdrawal returns the latest finalized L2 withdrawal from the database.
func (c *CrossMessage) GetL2LatestFinalizedWithdrawal(ctx context.Context) (*CrossMessage, error) {
// GetLatestL2Withdrawal returns the latest processed L2 withdrawal from the database.
func (c *CrossMessage) GetLatestL2Withdrawal(ctx context.Context) (*CrossMessage, error) {
var message CrossMessage
db := c.db.WithContext(ctx)
db = db.Model(&CrossMessage{})
db = db.Where("message_type = ?", MessageTypeL2SentMessage)
db = db.Where("rollup_status = ?", RollupStatusTypeFinalized)
db = db.Where("tx_status != ?", TxStatusTypeSentFailed)
db = db.Order("message_nonce desc")
if err := db.First(&message).Error; err != nil {
if err == gorm.ErrRecordNotFound {
return nil, nil
}
return nil, fmt.Errorf("failed to get latest L2 finalized sent message event, error: %w", err)
return nil, fmt.Errorf("failed to get latest L2 sent message event, error: %w", err)
}
return &message, nil
}
// GetL2WithdrawalsByBlockRange returns the L2 withdrawals by block range from the database.
func (c *CrossMessage) GetL2WithdrawalsByBlockRange(ctx context.Context, startBlock, endBlock uint64) ([]*CrossMessage, error) {
var messages []*CrossMessage
db := c.db.WithContext(ctx)
db = db.Model(&CrossMessage{})
db = db.Where("l2_block_number >= ?", startBlock)
db = db.Where("l2_block_number <= ?", endBlock)
db = db.Where("tx_status != ?", TxStatusTypeSentTxReverted)
db = db.Where("message_type = ?", MessageTypeL2SentMessage)
db = db.Order("message_nonce asc")
if err := db.Find(&messages).Error; err != nil {
if err == gorm.ErrRecordNotFound {
return nil, nil
}
return nil, fmt.Errorf("failed to get latest L2 finalized sent message event, error: %w", err)
}
return messages, nil
}
// GetMessagesByTxHashes retrieves all cross messages from the database that match the provided transaction hashes.
func (c *CrossMessage) GetMessagesByTxHashes(ctx context.Context, txHashes []string) ([]*CrossMessage, error) {
var messages []*CrossMessage
@@ -253,64 +229,28 @@ func (c *CrossMessage) GetTxsByAddress(ctx context.Context, sender string) ([]*C
}
// UpdateL1MessageQueueEventsInfo updates the information about L1 message queue events in the database.
func (c *CrossMessage) UpdateL1MessageQueueEventsInfo(ctx context.Context, l1MessageQueueEvents []*MessageQueueEvent) error {
// update tx statuses.
func (c *CrossMessage) UpdateL1MessageQueueEventsInfo(ctx context.Context, l1MessageQueueEvents []*MessageQueueEvent, dbTX ...*gorm.DB) error {
for _, l1MessageQueueEvent := range l1MessageQueueEvents {
db := c.db
if len(dbTX) > 0 && dbTX[0] != nil {
db = dbTX[0]
}
db = db.WithContext(ctx)
db = db.Model(&CrossMessage{})
txStatusUpdateFields := make(map[string]interface{})
db = db.Where("message_type = ?", MessageTypeL1SentMessage)
db = db.Where("message_nonce = ?", l1MessageQueueEvent.QueueIndex)
updateFields := make(map[string]interface{})
switch l1MessageQueueEvent.EventType {
case MessageQueueEventTypeQueueTransaction:
continue
// Update l1_tx_hash if the user calls replayMessage.
updateFields["l1_tx_hash"] = l1MessageQueueEvent.TxHash.String()
case MessageQueueEventTypeDequeueTransaction:
// do not over-write terminal statuses.
db = db.Where("tx_status != ?", TxStatusTypeRelayed)
db = db.Where("tx_status != ?", TxStatusTypeDropped)
db = db.Where("message_nonce = ?", l1MessageQueueEvent.QueueIndex)
db = db.Where("message_type = ?", MessageTypeL1SentMessage)
txStatusUpdateFields["tx_status"] = TxStatusTypeSkipped
updateFields["tx_status"] = TxStatusTypeSkipped
case MessageQueueEventTypeDropTransaction:
// do not over-write terminal statuses.
db = db.Where("tx_status != ?", TxStatusTypeRelayed)
db = db.Where("tx_status != ?", TxStatusTypeDropped)
db = db.Where("message_nonce = ?", l1MessageQueueEvent.QueueIndex)
db = db.Where("message_type = ?", MessageTypeL1SentMessage)
txStatusUpdateFields["tx_status"] = TxStatusTypeDropped
updateFields["tx_status"] = TxStatusTypeDropped
}
if err := db.Updates(txStatusUpdateFields).Error; err != nil {
return fmt.Errorf("failed to update tx statuses of L1 message queue events, update fields: %v, error: %w", txStatusUpdateFields, err)
}
}
// update tx hashes of replay and refund.
for _, l1MessageQueueEvent := range l1MessageQueueEvents {
db := c.db
db = db.WithContext(ctx)
db = db.Model(&CrossMessage{})
txHashUpdateFields := make(map[string]interface{})
switch l1MessageQueueEvent.EventType {
case MessageQueueEventTypeDequeueTransaction:
continue
case MessageQueueEventTypeQueueTransaction:
// only replayMessages or enforced txs (whose message hashes would not be found), sendMessages have been filtered out.
// replayMessage case:
// First SentMessage in L1: https://sepolia.etherscan.io/tx/0xbee4b631312448fcc2caac86e4dccf0a2ae0a88acd6c5fd8764d39d746e472eb
// Transaction reverted in L2: https://sepolia.scrollscan.com/tx/0xde6ef307a7da255888aad7a4c40a6b8c886e46a8a05883070bbf18b736cbfb8c
// replayMessage: https://sepolia.etherscan.io/tx/0xa5392891232bb32d98fcdbaca0d91b4d22ef2755380d07d982eebd47b147ce28
//
// Note: update l1_tx_hash if the user calls replayMessage, cannot use queue index here,
// because in replayMessage, queue index != message nonce.
// Ref: https://github.com/scroll-tech/scroll/blob/v4.3.44/contracts/src/L1/L1ScrollMessenger.sol#L187-L190
db = db.Where("message_hash = ?", l1MessageQueueEvent.MessageHash.String())
txHashUpdateFields["l1_replay_tx_hash"] = l1MessageQueueEvent.TxHash.String()
case MessageQueueEventTypeDropTransaction:
db = db.Where("message_nonce = ?", l1MessageQueueEvent.QueueIndex)
db = db.Where("message_type = ?", MessageTypeL1SentMessage)
txHashUpdateFields["l1_refund_tx_hash"] = l1MessageQueueEvent.TxHash.String()
}
if err := db.Updates(txHashUpdateFields).Error; err != nil {
return fmt.Errorf("failed to update tx hashes of replay and refund in L1 message queue events info, update fields: %v, error: %w", txHashUpdateFields, err)
if err := db.Updates(updateFields).Error; err != nil {
return fmt.Errorf("failed to update L1 message queue events info, error: %w", err)
}
}
return nil
@@ -332,33 +272,15 @@ func (c *CrossMessage) UpdateBatchStatusOfL2Withdrawals(ctx context.Context, sta
return nil
}
// UpdateBatchIndexRollupStatusMerkleProofOfL2Messages updates the batch_index, rollup_status, and merkle_proof fields for a list of L2 cross messages.
func (c *CrossMessage) UpdateBatchIndexRollupStatusMerkleProofOfL2Messages(ctx context.Context, messages []*CrossMessage) error {
if len(messages) == 0 {
return nil
}
for _, message := range messages {
updateFields := map[string]interface{}{
"batch_index": message.BatchIndex,
"rollup_status": message.RollupStatus,
"merkle_proof": message.MerkleProof,
}
db := c.db.WithContext(ctx)
db = db.Model(&CrossMessage{})
db = db.Where("message_hash = ?", message.MessageHash)
if err := db.Updates(updateFields).Error; err != nil {
return fmt.Errorf("failed to update L2 message with message_hash %s, error: %w", message.MessageHash, err)
}
}
return nil
}
// InsertOrUpdateL1Messages inserts or updates a list of L1 cross messages into the database.
func (c *CrossMessage) InsertOrUpdateL1Messages(ctx context.Context, messages []*CrossMessage) error {
func (c *CrossMessage) InsertOrUpdateL1Messages(ctx context.Context, messages []*CrossMessage, dbTX ...*gorm.DB) error {
if len(messages) == 0 {
return nil
}
db := c.db
if len(dbTX) > 0 && dbTX[0] != nil {
db = dbTX[0]
}
db = db.WithContext(ctx)
db = db.Model(&CrossMessage{})
// 'tx_status' column is not explicitly assigned during the update to prevent a later status from being overwritten back to "sent".
@@ -373,17 +295,20 @@ func (c *CrossMessage) InsertOrUpdateL1Messages(ctx context.Context, messages []
}
// InsertOrUpdateL2Messages inserts or updates a list of L2 cross messages into the database.
func (c *CrossMessage) InsertOrUpdateL2Messages(ctx context.Context, messages []*CrossMessage) error {
func (c *CrossMessage) InsertOrUpdateL2Messages(ctx context.Context, messages []*CrossMessage, dbTX ...*gorm.DB) error {
if len(messages) == 0 {
return nil
}
db := c.db
if len(dbTX) > 0 && dbTX[0] != nil {
db = dbTX[0]
}
db = db.WithContext(ctx)
db = db.Model(&CrossMessage{})
// 'tx_status' column is not explicitly assigned during the update to prevent a later status from being overwritten back to "sent".
db = db.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "message_hash"}},
DoUpdates: clause.AssignmentColumns([]string{"sender", "receiver", "token_type", "l2_block_number", "l2_tx_hash", "l1_token_address", "l2_token_address", "token_ids", "token_amounts", "message_type", "block_timestamp", "message_from", "message_to", "message_value", "message_data", "message_nonce"}),
DoUpdates: clause.AssignmentColumns([]string{"sender", "receiver", "token_type", "l2_block_number", "l2_tx_hash", "l1_token_address", "l2_token_address", "token_ids", "token_amounts", "message_type", "block_timestamp", "message_from", "message_to", "message_value", "message_data", "merkle_proof", "message_nonce"}),
})
if err := db.Create(messages).Error; err != nil {
return fmt.Errorf("failed to insert message, error: %w", err)
@@ -391,92 +316,52 @@ func (c *CrossMessage) InsertOrUpdateL2Messages(ctx context.Context, messages []
return nil
}
// InsertFailedL2GatewayTxs inserts a list of transactions that failed to interact with the L2 gateways into the database.
// To resolve unique index confliction, L2 tx hash is used as the MessageHash.
// The OnConflict clause is used to prevent inserting same failed transactions multiple times.
func (c *CrossMessage) InsertFailedL2GatewayTxs(ctx context.Context, messages []*CrossMessage) error {
// InsertFailedGatewayRouterTxs inserts a list of transactions that failed to interact with the gateway router into the database.
// These failed transactions are only fetched once, so they are inserted without checking for duplicates.
// To resolve unique index confliction, a random UUID will be generated and used as the MessageHash.
func (c *CrossMessage) InsertFailedGatewayRouterTxs(ctx context.Context, messages []*CrossMessage, dbTX ...*gorm.DB) error {
if len(messages) == 0 {
return nil
}
for _, message := range messages {
message.MessageHash = message.L2TxHash
db := c.db
if len(dbTX) > 0 && dbTX[0] != nil {
db = dbTX[0]
}
db := c.db
db = db.WithContext(ctx)
db = db.Model(&CrossMessage{})
db = db.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "message_hash"}},
DoNothing: true,
})
if err := db.Create(&messages).Error; err != nil {
return fmt.Errorf("failed to insert failed gateway router txs, error: %w", err)
}
return nil
}
// InsertFailedL1GatewayTxs inserts a list of transactions that failed to interact with the L1 gateways into the database.
// To resolve unique index confliction, L1 tx hash is used as the MessageHash.
// The OnConflict clause is used to prevent inserting same failed transactions multiple times.
func (c *CrossMessage) InsertFailedL1GatewayTxs(ctx context.Context, messages []*CrossMessage) error {
if len(messages) == 0 {
return nil
}
for _, message := range messages {
message.MessageHash = message.L1TxHash
message.MessageHash = uuid.New().String()
}
db := c.db
db = db.WithContext(ctx)
db = db.Model(&CrossMessage{})
db = db.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "message_hash"}},
DoNothing: true,
})
if err := db.Create(&messages).Error; err != nil {
if err := db.Create(messages).Error; err != nil {
return fmt.Errorf("failed to insert failed gateway router txs, error: %w", err)
}
return nil
}
// InsertOrUpdateL2RelayedMessagesOfL1Deposits inserts or updates the database with a list of L2 relayed messages related to L1 deposits.
func (c *CrossMessage) InsertOrUpdateL2RelayedMessagesOfL1Deposits(ctx context.Context, l2RelayedMessages []*CrossMessage) error {
func (c *CrossMessage) InsertOrUpdateL2RelayedMessagesOfL1Deposits(ctx context.Context, l2RelayedMessages []*CrossMessage, dbTX ...*gorm.DB) error {
if len(l2RelayedMessages) == 0 {
return nil
}
// Deduplicate messages, for each message_hash, retaining message with the highest block number.
// This is necessary as a single message, like a FailedRelayedMessage or a reverted relayed transaction,
// may be relayed multiple times within certain block ranges, potentially leading to the error:
// "ERROR: ON CONFLICT DO UPDATE command cannot affect row a second time (SQLSTATE 21000)".
// This happens if we attempt to insert multiple records with the same message_hash in a single db.Create operation.
// For example, see these transactions where the same message was relayed twice within certain block ranges:
// Reverted tx 1: https://sepolia.scrollscan.com/tx/0xcd6979277c3bc747445273a5e58ef1e9692fbe101d88cfefbbb69d3aef3193c0
// Reverted tx 2: https://sepolia.scrollscan.com/tx/0x43e28ed7cb71107c18c5d8ebbdb4a1d9cac73e60391d14d41e92985028faa337
// Another example:
// FailedRelayedMessage 1: https://sepolia.scrollscan.com/tx/0xfadb147fb211e5096446c5cac3ae0a8a705d2ece6c47c65135c8874f84638f17
// FailedRelayedMessage 2: https://sepolia.scrollscan.com/tx/0x6cb149b61afd07bf2e17561a59ebebde41e343b6610290c97515b2f862160b42
mergedL2RelayedMessages := make(map[string]*CrossMessage)
for _, message := range l2RelayedMessages {
if existing, found := mergedL2RelayedMessages[message.MessageHash]; found {
if TxStatusType(message.TxStatus) == TxStatusTypeRelayed || message.L2BlockNumber > existing.L2BlockNumber {
mergedL2RelayedMessages[message.MessageHash] = message
}
} else {
mergedL2RelayedMessages[message.MessageHash] = message
}
db := c.db.WithContext(ctx)
db = db.Model(&CrossMessage{})
db = db.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "message_hash"}},
DoUpdates: clause.AssignmentColumns([]string{"message_type", "l2_block_number", "l2_tx_hash", "tx_status"}),
})
if err := db.Create(l2RelayedMessages).Error; err != nil {
return fmt.Errorf("failed to update L2 relayed message of L1 deposit, error: %w", err)
}
uniqueL2RelayedMessages := make([]*CrossMessage, 0, len(mergedL2RelayedMessages))
for _, msg := range mergedL2RelayedMessages {
uniqueL2RelayedMessages = append(uniqueL2RelayedMessages, msg)
return nil
}
// InsertOrUpdateL2RevertedRelayedMessagesOfL1Deposits inserts or updates the database with a list of L2 relayed messages related to L1 deposits.
func (c *CrossMessage) InsertOrUpdateL2RevertedRelayedMessagesOfL1Deposits(ctx context.Context, l2RevertedRelayedMessages []*CrossMessage, dbTX ...*gorm.DB) error {
if len(l2RevertedRelayedMessages) == 0 {
return nil
}
// Do not update tx status of successfully relayed messages,
// because if a message is handled, the later relayed message tx would be reverted.
// ref: https://github.com/scroll-tech/scroll/blob/v4.3.44/contracts/src/L2/L2ScrollMessenger.sol#L102
// e.g.,
// Do not update tx status of successfully relayed messages. e.g.,
// Successfully relayed: https://sepolia.scrollscan.com/tx/0x4eb7cb07ba76956259c0079819a34a146f8a93dd891dc94812e9b3d66b056ec7#eventlog
// Reverted tx 1 (Reason: Message was already successfully executed): https://sepolia.scrollscan.com/tx/0x1973cafa14eb40734df30da7bfd4d9aceb53f8f26e09d96198c16d0e2e4a95fd
// Reverted tx 2 (Reason: Message was already successfully executed): https://sepolia.scrollscan.com/tx/0x02fc3a28684a590aead2482022f56281539085bd3d273ac8dedc1ceccb2bc554
@@ -485,24 +370,16 @@ func (c *CrossMessage) InsertOrUpdateL2RelayedMessagesOfL1Deposits(ctx context.C
db = db.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "message_hash"}},
DoUpdates: clause.AssignmentColumns([]string{"message_type", "l2_block_number", "l2_tx_hash", "tx_status"}),
Where: clause.Where{
Exprs: []clause.Expression{
clause.And(
// do not over-write terminal statuses.
clause.Neq{Column: "cross_message_v2.tx_status", Value: TxStatusTypeRelayed},
clause.Neq{Column: "cross_message_v2.tx_status", Value: TxStatusTypeDropped},
),
},
},
Where: clause.Where{Exprs: []clause.Expression{clause.Neq{Column: "cross_message.tx_status", Value: TxStatusTypeRelayed}}},
})
if err := db.Create(uniqueL2RelayedMessages).Error; err != nil {
if err := db.Create(l2RevertedRelayedMessages).Error; err != nil {
return fmt.Errorf("failed to update L2 reverted relayed message of L1 deposit, error: %w", err)
}
return nil
}
// InsertOrUpdateL1RelayedMessagesOfL2Withdrawals inserts or updates the database with a list of L1 relayed messages related to L2 withdrawals.
func (c *CrossMessage) InsertOrUpdateL1RelayedMessagesOfL2Withdrawals(ctx context.Context, l1RelayedMessages []*CrossMessage) error {
func (c *CrossMessage) InsertOrUpdateL1RelayedMessagesOfL2Withdrawals(ctx context.Context, l1RelayedMessages []*CrossMessage, dbTX ...*gorm.DB) error {
if len(l1RelayedMessages) == 0 {
return nil
}
@@ -514,13 +391,10 @@ func (c *CrossMessage) InsertOrUpdateL1RelayedMessagesOfL2Withdrawals(ctx contex
// For example, see these transactions where the same message was relayed twice within certain block ranges:
// FailedRelayedMessage 1: https://sepolia.etherscan.io/tx/0x28b3212cda6ca0f3790f362a780257bbe2b37417ccf75a4eca6c3a08294c8f1b#eventlog
// FailedRelayedMessage 2: https://sepolia.etherscan.io/tx/0xc8a8254825dd2cab5caef58cfd8d88c077ceadadc78f2340214a86cf8ab88543#eventlog
// Another example (relayed success, then relayed again):
// Relay Message, and success: https://sepolia.etherscan.io/tx/0xcfdf2f5446719e3e123a8aa06e4d6b3809c3850a13adf875755c8b1e423aa448#eventlog
// Relay Message again, and reverted: https://sepolia.etherscan.io/tx/0xb1fcae7546f3de4cfd0b4d679f4075adb4eb69578b12e2b5673f5f24b1836578
mergedL1RelayedMessages := make(map[string]*CrossMessage)
for _, message := range l1RelayedMessages {
if existing, found := mergedL1RelayedMessages[message.MessageHash]; found {
if TxStatusType(message.TxStatus) == TxStatusTypeRelayed || message.L1BlockNumber > existing.L1BlockNumber {
if message.L1BlockNumber > existing.L1BlockNumber {
mergedL1RelayedMessages[message.MessageHash] = message
}
} else {
@@ -532,20 +406,14 @@ func (c *CrossMessage) InsertOrUpdateL1RelayedMessagesOfL2Withdrawals(ctx contex
uniqueL1RelayedMessages = append(uniqueL1RelayedMessages, msg)
}
db := c.db
if len(dbTX) > 0 && dbTX[0] != nil {
db = dbTX[0]
}
db = db.WithContext(ctx)
db = db.Model(&CrossMessage{})
db = db.Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "message_hash"}},
DoUpdates: clause.AssignmentColumns([]string{"message_type", "l1_block_number", "l1_tx_hash", "tx_status"}),
Where: clause.Where{
Exprs: []clause.Expression{
clause.And(
// do not over-write terminal statuses.
clause.Neq{Column: "cross_message_v2.tx_status", Value: TxStatusTypeRelayed},
clause.Neq{Column: "cross_message_v2.tx_status", Value: TxStatusTypeDropped},
),
},
},
})
if err := db.Create(uniqueL1RelayedMessages).Error; err != nil {
return fmt.Errorf("failed to update L1 relayed message of L2 withdrawal, error: %w", err)

View File

@@ -18,7 +18,7 @@ const MigrationsDir string = "migrations"
func init() {
goose.SetBaseFS(embedMigrations)
goose.SetSequential(true)
goose.SetTableName("bridge_historyv2_migrations")
goose.SetTableName("bridge_history_migrations")
verbose, _ := strconv.ParseBool(os.Getenv("LOG_SQL_MIGRATIONS"))
goose.SetVerbose(verbose)

View File

@@ -1,6 +1,6 @@
-- +goose Up
-- +goose StatementBegin
CREATE TABLE cross_message_v2
CREATE TABLE cross_message
(
id BIGSERIAL PRIMARY KEY,
message_type SMALLINT NOT NULL,
@@ -12,8 +12,6 @@ CREATE TABLE cross_message_v2
message_hash VARCHAR DEFAULT NULL, -- NULL for failed txs
l1_tx_hash VARCHAR DEFAULT NULL,
l1_replay_tx_hash VARCHAR DEFAULT NULL,
l1_refund_tx_hash VARCHAR DEFAULT NULL,
l2_tx_hash VARCHAR DEFAULT NULL,
l1_block_number BIGINT DEFAULT NULL,
l2_block_number BIGINT DEFAULT NULL,
@@ -38,20 +36,19 @@ CREATE TABLE cross_message_v2
deleted_at TIMESTAMP(0) DEFAULT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS idx_cm_message_hash ON cross_message_v2 (message_hash);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_l1_block_number ON cross_message_v2 (message_type, l1_block_number DESC);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_l2_block_number ON cross_message_v2 (message_type, l2_block_number DESC);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_rollup_status_message_nonce ON cross_message_v2 (message_type, rollup_status, message_nonce DESC);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_message_nonce_tx_status_l2_block_number ON cross_message_v2 (message_type, message_nonce, tx_status, l2_block_number);
CREATE INDEX IF NOT EXISTS idx_cm_l1_tx_hash ON cross_message_v2 (l1_tx_hash);
CREATE INDEX IF NOT EXISTS idx_cm_l2_tx_hash ON cross_message_v2 (l2_tx_hash);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_tx_status_sender_block_timestamp ON cross_message_v2 (message_type, tx_status, sender, block_timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_sender_block_timestamp ON cross_message_v2 (message_type, sender, block_timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_cm_sender_block_timestamp ON cross_message_v2 (sender, block_timestamp DESC);
CREATE UNIQUE INDEX IF NOT EXISTS idx_cm_message_hash ON cross_message (message_hash);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_l1_block_number ON cross_message (message_type, l1_block_number DESC);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_l2_block_number ON cross_message (message_type, l2_block_number DESC);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_message_nonce ON cross_message (message_type, message_nonce DESC);
CREATE INDEX IF NOT EXISTS idx_cm_l1_tx_hash ON cross_message (l1_tx_hash);
CREATE INDEX IF NOT EXISTS idx_cm_l2_tx_hash ON cross_message (l2_tx_hash);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_tx_status_sender_block_timestamp ON cross_message (message_type, tx_status, sender, block_timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_cm_message_type_sender_block_timestamp ON cross_message (message_type, sender, block_timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_cm_sender_block_timestamp ON cross_message (sender, block_timestamp DESC);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS cross_message_v2;
DROP TABLE IF EXISTS cross_message;
-- +goose StatementEnd

View File

@@ -1,6 +1,6 @@
-- +goose Up
-- +goose StatementBegin
CREATE TABLE batch_event_v2
CREATE TABLE batch_event
(
id BIGSERIAL PRIMARY KEY,
l1_block_number BIGINT NOT NULL,
@@ -15,15 +15,14 @@ CREATE TABLE batch_event_v2
deleted_at TIMESTAMP(0) DEFAULT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS unique_idx_be_batch_hash ON batch_event_v2 (batch_hash);
CREATE INDEX IF NOT EXISTS idx_be_l1_block_number ON batch_event_v2 (l1_block_number);
CREATE INDEX IF NOT EXISTS idx_be_batch_index ON batch_event_v2 (batch_index);
CREATE INDEX IF NOT EXISTS idx_be_batch_index_batch_hash ON batch_event_v2 (batch_index, batch_hash);
CREATE INDEX IF NOT EXISTS idx_be_end_block_number_update_status_batch_status_batch_index ON batch_event_v2 (end_block_number, update_status, batch_status, batch_index);
CREATE INDEX IF NOT EXISTS idx_be_l1_block_number ON batch_event (l1_block_number);
CREATE INDEX IF NOT EXISTS idx_be_batch_index ON batch_event (batch_index);
CREATE UNIQUE INDEX IF NOT EXISTS unique_idx_be_batch_index_batch_hash ON batch_event (batch_index, batch_hash);
CREATE INDEX IF NOT EXISTS idx_be_end_block_number_update_status_batch_index ON batch_event (end_block_number, update_status, batch_index);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS batch_event_v2;
DROP TABLE IF EXISTS batch_event;
-- +goose StatementEnd

View File

@@ -4,8 +4,6 @@ import (
"net/http"
"github.com/gin-gonic/gin"
"scroll-tech/bridge-history-api/internal/orm"
)
const (
@@ -39,8 +37,8 @@ type QueryByHashRequest struct {
// ResultData contains return txs and total
type ResultData struct {
Results []*TxHistoryInfo `json:"results"`
Total uint64 `json:"total"`
Result []*TxHistoryInfo `json:"result"`
Total uint64 `json:"total"`
}
// Response the response schema
@@ -50,46 +48,37 @@ type Response struct {
Data interface{} `json:"data"`
}
// CounterpartChainTx is the schema of counterpart chain tx info
type CounterpartChainTx struct {
// Finalized the schema of tx finalized infos
type Finalized struct {
Hash string `json:"hash"`
BlockNumber uint64 `json:"block_number"`
BlockNumber uint64 `json:"blockNumber"`
}
// ClaimInfo is the schema of tx claim info
type ClaimInfo struct {
From string `json:"from"`
To string `json:"to"`
Value string `json:"value"`
Nonce string `json:"nonce"`
Message string `json:"message"`
Proof L2MessageProof `json:"proof"`
Claimable bool `json:"claimable"`
}
// L2MessageProof is the schema of L2 message proof
type L2MessageProof struct {
BatchIndex string `json:"batch_index"`
MerkleProof string `json:"merkle_proof"`
// UserClaimInfo the schema of tx claim infos
type UserClaimInfo struct {
From string `json:"from"`
To string `json:"to"`
Value string `json:"value"`
Nonce string `json:"nonce"`
Message string `json:"message"`
Proof string `json:"proof"`
BatchIndex string `json:"batch_index"`
Claimable bool `json:"claimable"`
}
// TxHistoryInfo the schema of tx history infos
type TxHistoryInfo struct {
Hash string `json:"hash"`
ReplayTxHash string `json:"replay_tx_hash"`
RefundTxHash string `json:"refund_tx_hash"`
MessageHash string `json:"message_hash"`
TokenType orm.TokenType `json:"token_type"` // 0: unknown, 1: eth, 2: erc20, 3: erc721, 4: erc1155
TokenIDs []string `json:"token_ids"` // only for erc721 and erc1155
TokenAmounts []string `json:"token_amounts"` // for eth and erc20, the length is 1, for erc721 and erc1155, the length could be > 1
MessageType orm.MessageType `json:"message_type"` // 0: unknown, 1: layer 1 message, 2: layer 2 message
L1TokenAddress string `json:"l1_token_address"`
L2TokenAddress string `json:"l2_token_address"`
BlockNumber uint64 `json:"block_number"`
TxStatus orm.TxStatusType `json:"tx_status"` // 0: sent, 1: sent failed, 2: relayed, 3: failed relayed, 4: relayed reverted, 5: skipped, 6: dropped
CounterpartChainTx *CounterpartChainTx `json:"counterpart_chain_tx"`
ClaimInfo *ClaimInfo `json:"claim_info"`
BlockTimestamp uint64 `json:"block_timestamp"`
Hash string `json:"hash"`
MsgHash string `json:"msgHash"`
Amount string `json:"amount"`
IsL1 bool `json:"isL1"`
L1Token string `json:"l1Token"`
L2Token string `json:"l2Token"`
BlockNumber uint64 `json:"blockNumber"`
TxStatus int `json:"txStatus"`
FinalizeTx *Finalized `json:"finalizeTx"`
ClaimInfo *UserClaimInfo `json:"claimInfo"`
BlockTimestamp uint64 `json:"blockTimestamp"`
}
// RenderJSON renders response with json

View File

@@ -14,6 +14,7 @@ import (
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/rpc"
"golang.org/x/sync/errgroup"
backendabi "scroll-tech/bridge-history-api/abi"
@@ -115,8 +116,8 @@ func GetBatchRangeFromCalldata(calldata []byte) (uint64, uint64, error) {
return startBlock, finishBlock, err
}
// GetBlocksInRange gets a batch of blocks for a block range [start, end] inclusive.
func GetBlocksInRange(ctx context.Context, cli *ethclient.Client, start, end uint64) ([]*types.Block, error) {
// GetL1BlocksInRange gets a batch of blocks for a block range [start, end] inclusive.
func GetL1BlocksInRange(ctx context.Context, cli *ethclient.Client, start, end uint64) ([]*types.Block, error) {
var (
eg errgroup.Group
blocks = make([]*types.Block, end-start+1)
@@ -147,6 +148,38 @@ func GetBlocksInRange(ctx context.Context, cli *ethclient.Client, start, end uin
return blocks, nil
}
// GetL2BlocksInRange gets a batch of blocks for a block range [start, end] inclusive.
func GetL2BlocksInRange(ctx context.Context, cli *ethclient.Client, start, end uint64) ([]*types.BlockWithRowConsumption, error) {
var (
eg errgroup.Group
blocks = make([]*types.BlockWithRowConsumption, end-start+1)
concurrency = 32
sem = make(chan struct{}, concurrency)
)
for i := start; i <= end; i++ {
sem <- struct{}{} // Acquire a slot in the semaphore
blockNum := rpc.BlockNumberOrHashWithNumber(rpc.BlockNumber(i))
index := i - start
eg.Go(func() error {
defer func() { <-sem }() // Release the slot when done
block, err := cli.GetBlockByNumberOrHash(ctx, blockNum)
if err != nil {
log.Error("Failed to fetch block number", "number", blockNum, "error", err)
return err
}
blocks[index] = block
return nil
})
}
if err := eg.Wait(); err != nil {
log.Error("Error waiting for block fetching routines", "error", err)
return nil, err
}
return blocks, nil
}
// ConvertBigIntArrayToString convert the big int array to string
func ConvertBigIntArrayToString(array []*big.Int) string {
stringArray := make([]string, len(array))
@@ -158,19 +191,7 @@ func ConvertBigIntArrayToString(array []*big.Int) string {
return result
}
// ConvertStringToStringArray takes a string with values separated by commas and returns a slice of strings
func ConvertStringToStringArray(s string) []string {
if s == "" {
return []string{}
}
stringParts := strings.Split(s, ",")
for i, part := range stringParts {
stringParts[i] = strings.TrimSpace(part)
}
return stringParts
}
// GetSkippedQueueIndices gets the skipped queue indices
// GetSkippedQueueIndices get the skipped queue indices
func GetSkippedQueueIndices(startIndex uint64, skippedBitmap *big.Int) []uint64 {
var indices []uint64
for i := 0; i < 256; i++ {

View File

@@ -1,7 +1,6 @@
package utils
import (
"math/big"
"testing"
"github.com/scroll-tech/go-ethereum/common"
@@ -37,55 +36,3 @@ func TestGetBatchRangeFromCalldata(t *testing.T) {
assert.Equal(t, start, uint64(0))
assert.Equal(t, finish, uint64(0))
}
// TestConvertBigIntArrayToString tests the ConvertBigIntArrayToString function
func TestConvertBigIntArrayToString(t *testing.T) {
tests := []struct {
array []*big.Int
expected string
}{
{[]*big.Int{big.NewInt(1), big.NewInt(2), big.NewInt(3)}, "1, 2, 3"},
{[]*big.Int{big.NewInt(0), big.NewInt(-1)}, "0, -1"},
{[]*big.Int{}, ""},
}
for _, test := range tests {
got := ConvertBigIntArrayToString(test.array)
assert.Equal(t, test.expected, got)
}
}
// TestConvertStringToStringArray tests the ConvertStringToStringArray function
func TestConvertStringToStringArray(t *testing.T) {
tests := []struct {
s string
expected []string
}{
{"1, 2, 3", []string{"1", "2", "3"}},
{" 4 , 5 , 6 ", []string{"4", "5", "6"}},
{"", []string{}},
}
for _, test := range tests {
got := ConvertStringToStringArray(test.s)
assert.Equal(t, test.expected, got)
}
}
// TestGetSkippedQueueIndices tests the GetSkippedQueueIndices function
func TestGetSkippedQueueIndices(t *testing.T) {
tests := []struct {
startIndex uint64
bitmap *big.Int
expected []uint64
}{
{0, big.NewInt(0b101), []uint64{0, 2}},
{10, big.NewInt(0b110), []uint64{11, 12}},
{0, big.NewInt(0), nil}, // No bits set
}
for _, test := range tests {
got := GetSkippedQueueIndices(test.startIndex, test.bitmap)
assert.Equal(t, test.expected, got)
}
}

View File

@@ -46,71 +46,15 @@ func (w *WithdrawTrie) Initialize(currentMessageNonce uint64, msgHash common.Has
}
// AppendMessages appends a list of new messages as leaf nodes to the rightest of the tree and returns the proofs for all messages.
// The function correctly returns the proofs for the entire tree after all messages have been inserted, not the individual proofs after each insertion.
func (w *WithdrawTrie) AppendMessages(hashes []common.Hash) [][]byte {
length := len(hashes)
if length == 0 {
return make([][]byte, 0)
}
cache := make([]map[uint64]common.Hash, MaxHeight)
for h := 0; h < MaxHeight; h++ {
cache[h] = make(map[uint64]common.Hash)
}
// cache all branches will be used later.
if w.NextMessageNonce != 0 {
index := w.NextMessageNonce
for h := 0; h <= w.height; h++ {
if index%2 == 1 {
// right child, `w.branches[h]` is the corresponding left child
// the index of left child should be `index ^ 1`.
cache[h][index^1] = w.branches[h]
}
index >>= 1
}
}
// cache all new leaves
for i := 0; i < length; i++ {
cache[0][w.NextMessageNonce+uint64(i)] = hashes[i]
}
// build withdraw trie with new hashes
minIndex := w.NextMessageNonce
maxIndex := w.NextMessageNonce + uint64(length) - 1
for h := 0; maxIndex > 0; h++ {
if minIndex%2 == 1 {
minIndex--
}
if maxIndex%2 == 0 {
cache[h][maxIndex^1] = w.zeroes[h]
}
for i := minIndex; i <= maxIndex; i += 2 {
cache[h+1][i>>1] = Keccak2(cache[h][i], cache[h][i^1])
}
minIndex >>= 1
maxIndex >>= 1
}
// update branches using hashes one by one
for i := 0; i < length; i++ {
proof := updateBranchWithNewMessage(w.zeroes, w.branches, w.NextMessageNonce, hashes[i])
w.NextMessageNonce++
w.height = len(proof)
}
proofs := make([][]byte, length)
// retrieve merkle proof from cache
for i := 0; i < length; i++ {
index := w.NextMessageNonce + uint64(i) - uint64(length)
var merkleProof []common.Hash
for h := 0; h < w.height; h++ {
merkleProof = append(merkleProof, cache[h][index^1])
index >>= 1
}
merkleProof := updateBranchWithNewMessage(w.zeroes, w.branches, w.NextMessageNonce, hashes[i])
w.NextMessageNonce++
w.height = len(merkleProof)
proofs[i] = encodeMerkleProofToBytes(merkleProof)
}
return proofs
}

View File

@@ -17,5 +17,5 @@ RUN --mount=target=. \
FROM alpine:latest
COPY --from=builder /bin/bridgehistoryapi-api /bin/
WORKDIR /app
ENTRYPOINT ["bridgehistoryapi-api"]

View File

@@ -14,6 +14,7 @@ RUN --mount=target=. \
# Pull db_cli into a second stage deploy alpine container
FROM alpine:latest
COPY --from=builder /bin/db_cli /bin/
WORKDIR /app
ENTRYPOINT ["db_cli"]

View File

@@ -17,5 +17,5 @@ RUN --mount=target=. \
FROM alpine:latest
COPY --from=builder /bin/bridgehistoryapi-fetcher /bin/
WORKDIR /app
ENTRYPOINT ["bridgehistoryapi-fetcher"]

View File

@@ -46,5 +46,5 @@ RUN mkdir -p /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/lib /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/coordinator_api /bin/
RUN /bin/coordinator_api --version
WORKDIR /app
ENTRYPOINT ["/bin/coordinator_api"]

View File

@@ -21,5 +21,5 @@ RUN --mount=target=. \
# Pull coordinator into a second stage deploy alpine container
FROM alpine:latest
COPY --from=builder /bin/coordinator_cron /bin/
WORKDIR /app
ENTRYPOINT ["coordinator_cron"]

View File

@@ -21,6 +21,7 @@ RUN --mount=target=. \
# Pull db_cli into a second stage deploy alpine container
FROM alpine:latest
COPY --from=builder /bin/db_cli /bin/
WORKDIR /app
ENTRYPOINT ["db_cli"]

View File

@@ -21,6 +21,7 @@ RUN --mount=target=. \
# Pull event_watcher into a second stage deploy alpine container
FROM alpine:latest
COPY --from=builder /bin/event_watcher /bin/
WORKDIR /app
ENTRYPOINT ["event_watcher"]

View File

@@ -21,6 +21,7 @@ RUN --mount=target=. \
# Pull gas_oracle into a second stage deploy alpine container
FROM alpine:latest
COPY --from=builder /bin/gas_oracle /bin/
WORKDIR /app
ENTRYPOINT ["gas_oracle"]

View File

@@ -21,6 +21,7 @@ RUN --mount=target=. \
# Pull rollup_relayer into a second stage deploy alpine container
FROM alpine:latest
COPY --from=builder /bin/rollup_relayer /bin/
WORKDIR /app
ENTRYPOINT ["rollup_relayer"]

View File

@@ -95,7 +95,8 @@ func (c *Cmd) Write(data []byte) (int, error) {
if verbose || c.openLog {
fmt.Printf("%s:\n\t%v", c.name, out)
} else if strings.Contains(strings.ToLower(out), "error") ||
strings.Contains(strings.ToLower(out), "warning") {
strings.Contains(strings.ToLower(out), "warning") ||
strings.Contains(strings.ToLower(out), "info") {
fmt.Printf("%s:\n\t%v", c.name, out)
}
go c.checkFuncs.IterCb(func(_ string, value interface{}) {

View File

@@ -48,7 +48,7 @@ type App struct {
Timestamp int
}
// NewDockerApp returns new instance of dockerApp struct
// NewDockerApp returns new instance of dokerApp struct
func NewDockerApp() *App {
timestamp := time.Now().Nanosecond()
app := &App{

View File

@@ -21,8 +21,8 @@ const (
// GasOracleImported represents the gas oracle status is imported
GasOracleImported
// GasOracleImportedFailed represents the gas oracle status is imported failed
GasOracleImportedFailed
// GasOracleFailed represents the gas oracle status is failed
GasOracleFailed
)
func (s GasOracleStatus) String() string {
@@ -35,10 +35,10 @@ func (s GasOracleStatus) String() string {
return "GasOracleImporting"
case GasOracleImported:
return "GasOracleImported"
case GasOracleImportedFailed:
return "GasOracleImportedFailed"
case GasOracleFailed:
return "GasOracleFailed"
default:
return fmt.Sprintf("Undefined GasOracleStatus (%d)", int32(s))
return fmt.Sprintf("Undefined (%d)", int32(s))
}
}
@@ -159,7 +159,7 @@ func (ps ProvingStatus) String() string {
case ProvingTaskFailed:
return "failed"
default:
return fmt.Sprintf("Undefined ProvingStatus (%d)", int32(ps))
return fmt.Sprintf("Undefined (%d)", int32(ps))
}
}
@@ -184,7 +184,7 @@ func (s ChunkProofsStatus) String() string {
case ChunkProofsStatusReady:
return "ChunkProofsStatusReady"
default:
return fmt.Sprintf("Undefined ChunkProofsStatus (%d)", int32(s))
return fmt.Sprintf("Undefined (%d)", int32(s))
}
}
@@ -227,69 +227,6 @@ func (s RollupStatus) String() string {
case RollupFinalizeFailed:
return "RollupFinalizeFailed"
default:
return fmt.Sprintf("Undefined RollupStatus (%d)", int32(s))
}
}
// SenderType defines the various types of senders sending the transactions.
type SenderType int
const (
// SenderTypeUnknown indicates an unknown sender type.
SenderTypeUnknown SenderType = iota
// SenderTypeCommitBatch indicates the sender is responsible for committing batches.
SenderTypeCommitBatch
// SenderTypeFinalizeBatch indicates the sender is responsible for finalizing batches.
SenderTypeFinalizeBatch
// SenderTypeL1GasOracle indicates a sender from L2 responsible for updating L1 gas prices.
SenderTypeL1GasOracle
// SenderTypeL2GasOracle indicates a sender from L1 responsible for updating L2 gas prices.
SenderTypeL2GasOracle
)
// String returns a string representation of the SenderType.
func (t SenderType) String() string {
switch t {
case SenderTypeCommitBatch:
return "SenderTypeCommitBatch"
case SenderTypeFinalizeBatch:
return "SenderTypeFinalizeBatch"
case SenderTypeL1GasOracle:
return "SenderTypeL1GasOracle"
case SenderTypeL2GasOracle:
return "SenderTypeL2GasOracle"
default:
return fmt.Sprintf("Unknown SenderType (%d)", int32(t))
}
}
// TxStatus represents the current status of a transaction in the transaction lifecycle.
type TxStatus int
const (
// TxStatusUnknown represents an undefined status of the transaction.
TxStatusUnknown TxStatus = iota
// TxStatusPending indicates that the transaction is yet to be processed.
TxStatusPending
// TxStatusReplaced indicates that the transaction has been replaced by another one, typically due to a higher gas price.
TxStatusReplaced
// TxStatusConfirmed indicates that the transaction has been successfully processed and confirmed.
TxStatusConfirmed
// TxStatusConfirmedFailed indicates that the transaction has failed during processing.
TxStatusConfirmedFailed
)
func (s TxStatus) String() string {
switch s {
case TxStatusPending:
return "TxStatusPending"
case TxStatusReplaced:
return "TxStatusReplaced"
case TxStatusConfirmed:
return "TxStatusConfirmed"
case TxStatusConfirmedFailed:
return "TxStatusConfirmedFailed"
default:
return fmt.Sprintf("Unknown TxStatus (%d)", int32(s))
return fmt.Sprintf("Undefined (%d)", int32(s))
}
}

View File

@@ -75,7 +75,7 @@ func TestProvingStatus(t *testing.T) {
{
"Undefined",
ProvingStatus(999), // Invalid value.
"Undefined ProvingStatus (999)",
"Undefined (999)",
},
}
@@ -85,243 +85,3 @@ func TestProvingStatus(t *testing.T) {
})
}
}
func TestRollupStatus(t *testing.T) {
tests := []struct {
name string
s RollupStatus
want string
}{
{
"RollupUndefined",
RollupUndefined,
"Undefined RollupStatus (0)",
},
{
"RollupPending",
RollupPending,
"RollupPending",
},
{
"RollupCommitting",
RollupCommitting,
"RollupCommitting",
},
{
"RollupCommitted",
RollupCommitted,
"RollupCommitted",
},
{
"RollupFinalizing",
RollupFinalizing,
"RollupFinalizing",
},
{
"RollupFinalized",
RollupFinalized,
"RollupFinalized",
},
{
"RollupCommitFailed",
RollupCommitFailed,
"RollupCommitFailed",
},
{
"RollupFinalizeFailed",
RollupFinalizeFailed,
"RollupFinalizeFailed",
},
{
"Invalid Value",
RollupStatus(999),
"Undefined RollupStatus (999)",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, tt.want, tt.s.String())
})
}
}
func TestSenderType(t *testing.T) {
tests := []struct {
name string
t SenderType
want string
}{
{
"SenderTypeUnknown",
SenderTypeUnknown,
"Unknown SenderType (0)",
},
{
"SenderTypeCommitBatch",
SenderTypeCommitBatch,
"SenderTypeCommitBatch",
},
{
"SenderTypeFinalizeBatch",
SenderTypeFinalizeBatch,
"SenderTypeFinalizeBatch",
},
{
"SenderTypeL1GasOracle",
SenderTypeL1GasOracle,
"SenderTypeL1GasOracle",
},
{
"SenderTypeL2GasOracle",
SenderTypeL2GasOracle,
"SenderTypeL2GasOracle",
},
{
"Invalid Value",
SenderType(999),
"Unknown SenderType (999)",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, tt.want, tt.t.String())
})
}
}
// TestTxStatus verifies the String() representation of every TxStatus value,
// including an out-of-range value that falls through to the default case.
func TestTxStatus(t *testing.T) {
	cases := []struct {
		name     string
		status   TxStatus
		expected string
	}{
		{"TxStatusUnknown", TxStatusUnknown, "Unknown TxStatus (0)"},
		{"TxStatusPending", TxStatusPending, "TxStatusPending"},
		{"TxStatusReplaced", TxStatusReplaced, "TxStatusReplaced"},
		{"TxStatusConfirmed", TxStatusConfirmed, "TxStatusConfirmed"},
		{"TxStatusConfirmedFailed", TxStatusConfirmedFailed, "TxStatusConfirmedFailed"},
		// 999 is not a declared status; the stringer must report it as unknown.
		{"Invalid Value", TxStatus(999), "Unknown TxStatus (999)"},
	}
	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, tc.status.String())
		})
	}
}
// TestGasOracleStatus verifies the String() representation of every
// GasOracleStatus value, including an out-of-range value that falls through
// to the default case.
func TestGasOracleStatus(t *testing.T) {
	cases := []struct {
		name     string
		status   GasOracleStatus
		expected string
	}{
		{"GasOracleUndefined", GasOracleUndefined, "GasOracleUndefined"},
		{"GasOraclePending", GasOraclePending, "GasOraclePending"},
		{"GasOracleImporting", GasOracleImporting, "GasOracleImporting"},
		{"GasOracleImported", GasOracleImported, "GasOracleImported"},
		{"GasOracleImportedFailed", GasOracleImportedFailed, "GasOracleImportedFailed"},
		// 999 is not a declared status; the stringer must report it as undefined.
		{"Invalid Value", GasOracleStatus(999), "Undefined GasOracleStatus (999)"},
	}
	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, tc.status.String())
		})
	}
}
// TestProverTaskFailureType verifies the String() representation of every
// ProverTaskFailureType value, including an out-of-range value that falls
// through to the default case.
func TestProverTaskFailureType(t *testing.T) {
	cases := []struct {
		name     string
		failure  ProverTaskFailureType
		expected string
	}{
		{"ProverTaskFailureTypeUndefined", ProverTaskFailureTypeUndefined, "prover task failure undefined"},
		{"ProverTaskFailureTypeTimeout", ProverTaskFailureTypeTimeout, "prover task failure timeout"},
		{"ProverTaskFailureTypeSubmitStatusNotOk", ProverTaskFailureTypeSubmitStatusNotOk, "prover task failure validated submit proof status not ok"},
		{"ProverTaskFailureTypeVerifiedFailed", ProverTaskFailureTypeVerifiedFailed, "prover task failure verified failed"},
		{"ProverTaskFailureTypeServerError", ProverTaskFailureTypeServerError, "prover task failure server exception"},
		// 999 is not a declared failure type; the stringer must flag it as illegal.
		{"Invalid Value", ProverTaskFailureType(999), "illegal prover task failure type (999)"},
	}
	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, tc.failure.String())
		})
	}
}

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v4.3.65"
var tag = "v4.3.45"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {

View File

@@ -1,10 +0,0 @@
module.exports = {
skipFiles: [
'mocks',
'test',
'L2/predeploys/L1BlockContainer.sol',
'libraries/verifier/ZkTrieVerifier.sol',
'libraries/verifier/PatriciaMerkleTrieVerifier.sol'
],
istanbulReporter: ["lcov", "json"]
};

View File

@@ -1,6 +1,6 @@
# Scroll Contracts
This directory contains the solidity code for Scroll L1 bridge and rollup contracts and L2 bridge and pre-deployed contracts. You can also find contract APIs and more details in the [`docs`](./docs) folder.
This directory contains the solidity code for Scroll L1 bridge and rollup contracts and L2 bridge and pre-deployed contracts. The [`specs`](../specs/) folder describes the overall Scroll protocol including the cross-domain messaging and rollup process. You can also find contract APIs and more details in the [`docs`](./docs) folder.
## Directory Structure

View File

@@ -1,3 +1 @@
declare module "circomlib/src/evmasm";
declare module "circomlib/src/poseidon_gencontract";
declare module "circomlib/src/poseidon_constants";

View File

@@ -550,50 +550,3 @@ Emitted when token mapping for ERC1155 token is updated.
## Errors
### ErrorCallerIsNotCounterpartGateway
```solidity
error ErrorCallerIsNotCounterpartGateway()
```
*Thrown when the cross chain sender is not the counterpart gateway contract.*
### ErrorCallerIsNotMessenger
```solidity
error ErrorCallerIsNotMessenger()
```
*Thrown when the caller is not corresponding `L1ScrollMessenger` or `L2ScrollMessenger`.*
### ErrorNotInDropMessageContext
```solidity
error ErrorNotInDropMessageContext()
```
*Thrown when ScrollMessenger is not dropping message.*
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -149,7 +149,7 @@ function initialize(address _counterpart, address _messenger) external nonpayabl
Initialize the storage of L1ERC721Gateway.
*The parameters `_counterpart` and `_messenger` are no longer used.*
#### Parameters
@@ -489,50 +489,3 @@ Emitted when token mapping for ERC721 token is updated.
## Errors
### ErrorCallerIsNotCounterpartGateway
```solidity
error ErrorCallerIsNotCounterpartGateway()
```
*Thrown when the cross chain sender is not the counterpart gateway contract.*
### ErrorCallerIsNotMessenger
```solidity
error ErrorCallerIsNotMessenger()
```
*Thrown when the caller is not corresponding `L1ScrollMessenger` or `L2ScrollMessenger`.*
### ErrorNotInDropMessageContext
```solidity
error ErrorNotInDropMessageContext()
```
*Thrown when ScrollMessenger is not dropping message.*
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -168,7 +168,7 @@ function ethGateway() external view returns (address)
The address of L1ETHGateway.
*This variable is no longer used.*
#### Returns
@@ -286,7 +286,7 @@ function initialize(address _ethGateway, address _defaultERC20Gateway) external
Initialize the storage of L1GatewayRouter.
*The parameter `_ethGateway` is no longer used.*
#### Parameters
@@ -295,23 +295,6 @@ Initialize the storage of L1GatewayRouter.
| _ethGateway | address | The address of L1ETHGateway contract. |
| _defaultERC20Gateway | address | The address of default ERC20 Gateway contract. |
### messenger
```solidity
function messenger() external view returns (address)
```
The address of `L1ScrollMessenger`.
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | address | undefined |
### owner
```solidity
@@ -635,17 +618,3 @@ Emitted when the address of ETH Gateway is updated.
## Errors
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -72,7 +72,7 @@ function initialize(address _counterpart, address _feeVault, address _rollup, ad
Initialize the storage of L1ScrollMessenger.
*The parameters `_counterpart`, `_rollup` and `_messageQueue` are no longer used.*
#### Parameters
@@ -611,17 +611,3 @@ Emitted when the maximum number of times each message can be replayed is updated
## Errors
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -135,7 +135,7 @@ function initialize(address _counterpart, address _router, address _messenger, a
Initialize the storage of L1StandardERC20Gateway.
*The parameters `_counterpart`, `_router`, `_messenger`, `_l2TokenImplementation` and `_l2TokenFactory` are no longer used.*
#### Parameters
@@ -374,50 +374,3 @@ Emitted when some ERC20 token is refunded.
## Errors
### ErrorCallerIsNotCounterpartGateway
```solidity
error ErrorCallerIsNotCounterpartGateway()
```
*Thrown when the cross chain sender is not the counterpart gateway contract.*
### ErrorCallerIsNotMessenger
```solidity
error ErrorCallerIsNotMessenger()
```
*Thrown when the caller is not corresponding `L1ScrollMessenger` or `L2ScrollMessenger`.*
### ErrorNotInDropMessageContext
```solidity
error ErrorNotInDropMessageContext()
```
*Thrown when ScrollMessenger is not dropping message.*
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -372,50 +372,3 @@ Emitted when some ERC20 token is refunded.
## Errors
### ErrorCallerIsNotCounterpartGateway
```solidity
error ErrorCallerIsNotCounterpartGateway()
```
*Thrown when the cross chain sender is not the counterpart gateway contract.*
### ErrorCallerIsNotMessenger
```solidity
error ErrorCallerIsNotMessenger()
```
*Thrown when the caller is not corresponding `L1ScrollMessenger` or `L2ScrollMessenger`.*
### ErrorNotInDropMessageContext
```solidity
error ErrorNotInDropMessageContext()
```
*Thrown when ScrollMessenger is not dropping message.*
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -114,16 +114,16 @@ Complete ERC1155 deposit from layer 1 to layer 2 and send NFT to recipient&#39;s
function initialize(address _counterpart, address _messenger) external nonpayable
```
Initialize the storage of `L2ERC1155Gateway`.
*The parameters `_counterpart` and `_messenger` are no longer used.*
#### Parameters
| Name | Type | Description |
|---|---|---|
| _counterpart | address | The address of `L1ERC1155Gateway` contract in L1. |
| _messenger | address | The address of `L2ScrollMessenger` contract in L2. |
| _counterpart | address | undefined |
| _messenger | address | undefined |
### messenger
@@ -496,50 +496,3 @@ Emitted when the ERC1155 NFT is transfered to gateway on layer 2.
## Errors
### ErrorCallerIsNotCounterpartGateway
```solidity
error ErrorCallerIsNotCounterpartGateway()
```
*Thrown when the cross chain sender is not the counterpart gateway contract.*
### ErrorCallerIsNotMessenger
```solidity
error ErrorCallerIsNotMessenger()
```
*Thrown when the caller is not corresponding `L1ScrollMessenger` or `L2ScrollMessenger`.*
### ErrorNotInDropMessageContext
```solidity
error ErrorNotInDropMessageContext()
```
*Thrown when ScrollMessenger is not dropping message.*
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -110,16 +110,16 @@ Complete ERC721 deposit from layer 1 to layer 2 and send NFT to recipient&#39;s
function initialize(address _counterpart, address _messenger) external nonpayable
```
Initialize the storage of `L2ERC721Gateway`.
*The parameters `_counterpart` and `_messenger` are no longer used.*
#### Parameters
| Name | Type | Description |
|---|---|---|
| _counterpart | address | The address of `L1ERC721Gateway` contract in L1. |
| _messenger | address | The address of `L2ScrollMessenger` contract in L2. |
| _counterpart | address | undefined |
| _messenger | address | undefined |
### messenger
@@ -437,50 +437,3 @@ Emitted when the ERC721 NFT is transfered to gateway on layer 2.
## Errors
### ErrorCallerIsNotCounterpartGateway
```solidity
error ErrorCallerIsNotCounterpartGateway()
```
*Thrown when the cross chain sender is not the counterpart gateway contract.*
### ErrorCallerIsNotMessenger
```solidity
error ErrorCallerIsNotMessenger()
```
*Thrown when the caller is not corresponding `L1ScrollMessenger` or `L2ScrollMessenger`.*
### ErrorNotInDropMessageContext
```solidity
error ErrorNotInDropMessageContext()
```
*Thrown when ScrollMessenger is not dropping message.*
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -189,23 +189,6 @@ function initialize(address _ethGateway, address _defaultERC20Gateway) external
| _ethGateway | address | undefined |
| _defaultERC20Gateway | address | undefined |
### messenger
```solidity
function messenger() external view returns (address)
```
The address of `L2ScrollMessenger`.
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | address | undefined |
### owner
```solidity
@@ -581,17 +564,3 @@ Emitted when someone withdraw ETH from L2 to L1.
## Errors
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -47,7 +47,7 @@ The address of fee vault, collecting cross domain messaging fee.
### initialize
```solidity
function initialize(address) external nonpayable
function initialize(address _counterpart) external nonpayable
```
@@ -58,7 +58,7 @@ function initialize(address) external nonpayable
| Name | Type | Description |
|---|---|---|
| _0 | address | undefined |
| _counterpart | address | undefined |
### isL1MessageExecuted
@@ -448,17 +448,3 @@ Emitted when the maximum number of times each message can fail in L2 is updated.
## Errors
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -98,18 +98,18 @@ Return the corresponding l2 token address given l1 token address.
function initialize(address _counterpart, address _router, address _messenger, address _tokenFactory) external nonpayable
```
Initialize the storage of L2StandardERC20Gateway.
*The parameters `_counterpart`, `_router`, `_messenger` and `_tokenFactory` are no longer used.*
#### Parameters
| Name | Type | Description |
|---|---|---|
| _counterpart | address | The address of L1ETHGateway in L1. |
| _router | address | The address of L2GatewayRouter. |
| _messenger | address | The address of L2ScrollMessenger. |
| _tokenFactory | address | The address of ScrollStandardERC20Factory. |
| _counterpart | address | undefined |
| _router | address | undefined |
| _messenger | address | undefined |
| _tokenFactory | address | undefined |
### messenger
@@ -344,50 +344,3 @@ Emitted when someone withdraw ERC20 token from L2 to L1.
## Errors
### ErrorCallerIsNotCounterpartGateway
```solidity
error ErrorCallerIsNotCounterpartGateway()
```
*Thrown when the cross chain sender is not the counterpart gateway contract.*
### ErrorCallerIsNotMessenger
```solidity
error ErrorCallerIsNotMessenger()
```
*Thrown when the caller is not corresponding `L1ScrollMessenger` or `L2ScrollMessenger`.*
### ErrorNotInDropMessageContext
```solidity
error ErrorNotInDropMessageContext()
```
*Thrown when ScrollMessenger is not dropping message.*
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -115,17 +115,17 @@ Return the corresponding l2 token address given l1 token address.
function initialize(address _counterpart, address _router, address _messenger) external nonpayable
```
Initialize the storage of `L2WETHGateway`.
*The parameters `_counterpart`, `_router` and `_messenger` are no longer used.*
#### Parameters
| Name | Type | Description |
|---|---|---|
| _counterpart | address | The address of `L1WETHGateway` contract in L1. |
| _router | address | The address of `L2GatewayRouter` contract in L2. |
| _messenger | address | The address of `L2ScrollMessenger` contract in L2. |
| _counterpart | address | undefined |
| _router | address | undefined |
| _messenger | address | undefined |
### l1WETH
@@ -360,50 +360,3 @@ Emitted when someone withdraw ERC20 token from L2 to L1.
## Errors
### ErrorCallerIsNotCounterpartGateway
```solidity
error ErrorCallerIsNotCounterpartGateway()
```
*Thrown when the cross chain sender is not the counterpart gateway contract.*
### ErrorCallerIsNotMessenger
```solidity
error ErrorCallerIsNotMessenger()
```
*Thrown when the caller is not corresponding `L1ScrollMessenger` or `L2ScrollMessenger`.*
### ErrorNotInDropMessageContext
```solidity
error ErrorNotInDropMessageContext()
```
*Thrown when ScrollMessenger is not dropping message.*
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*

View File

@@ -148,17 +148,17 @@ Import layer 2 genesis block
function initialize(address _messageQueue, address _verifier, uint256 _maxNumTxInChunk) external nonpayable
```
Initialize the storage of ScrollChain.
*The parameter `_messageQueue` is no longer used.*
#### Parameters
| Name | Type | Description |
|---|---|---|
| _messageQueue | address | The address of `L1MessageQueue` contract. |
| _verifier | address | The address of zkevm verifier contract. |
| _maxNumTxInChunk | uint256 | The maximum number of transactions allowed in each chunk. |
| _messageQueue | address | undefined |
| _verifier | address | undefined |
| _maxNumTxInChunk | uint256 | undefined |
### isBatchFinalized
@@ -283,7 +283,7 @@ The maximum number of transactions allowed in each chunk.
function messageQueue() external view returns (address)
```
The address of L1MessageQueue contract.
The address of L1MessageQueue.
@@ -436,6 +436,22 @@ Update the value of `maxNumTxInChunk`.
|---|---|---|
| _maxNumTxInChunk | uint256 | The new value of `maxNumTxInChunk`. |
### updateVerifier
```solidity
function updateVerifier(address _newVerifier) external nonpayable
```
Update the address of the verifier contract.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _newVerifier | address | The address of new verifier contract. |
### verifier
```solidity
@@ -648,19 +664,22 @@ Emitted when owner updates the status of sequencer.
| account `indexed` | address | The address of account updated. |
| status | bool | The status of the account updated. |
## Errors
### ErrorZeroAddress
### UpdateVerifier
```solidity
error ErrorZeroAddress()
event UpdateVerifier(address indexed oldVerifier, address indexed newVerifier)
```
*Thrown when the given address is `address(0)`.*
Emitted when the address of rollup verifier is updated.
#### Parameters
| Name | Type | Description |
|---|---|---|
| oldVerifier `indexed` | address | The address of old rollup verifier. |
| newVerifier `indexed` | address | The address of new rollup verifier. |

View File

@@ -20,6 +20,7 @@ sender = '0x00a329c0648769a73afac7f9381e08fb43dbea72' # the address of `
tx_origin = '0x00a329c0648769a73afac7f9381e08fb43dbea72' # the address of `tx.origin` in tests
initial_balance = '0xffffffffffffffffffffffff' # the initial balance of the test contract
block_number = 0 # the block number we are at in tests
chain_id = 99 # the chain id we are on in tests
gas_limit = 9223372036854775807 # the gas limit in tests
gas_price = 0 # the gas price (in wei) in tests
block_base_fee_per_gas = 0 # the base fee (in wei) in tests

View File

@@ -17,10 +17,10 @@ describe("EnforcedTxGateway.spec", async () => {
let oracle: L2GasPriceOracle;
let queue: L1MessageQueue;
const deployProxy = async (name: string, admin: string, args: any[]): Promise<string> => {
const deployProxy = async (name: string, admin: string): Promise<string> => {
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
const Factory = await ethers.getContractFactory(name, deployer);
const impl = args.length > 0 ? await Factory.deploy(...args) : await Factory.deploy();
const impl = await Factory.deploy();
await impl.deployed();
const proxy = await TransparentUpgradeableProxy.deploy(impl.address, admin, "0x");
await proxy.deployed();
@@ -34,21 +34,17 @@ describe("EnforcedTxGateway.spec", async () => {
const admin = await ProxyAdmin.deploy();
await admin.deployed();
gateway = await ethers.getContractAt(
"EnforcedTxGateway",
await deployProxy("EnforcedTxGateway", admin.address, []),
deployer
);
queue = await ethers.getContractAt(
"L1MessageQueue",
await deployProxy("L1MessageQueue", admin.address, [deployer.address, deployer.address, gateway.address]),
deployer
);
queue = await ethers.getContractAt("L1MessageQueue", await deployProxy("L1MessageQueue", admin.address), deployer);
oracle = await ethers.getContractAt(
"L2GasPriceOracle",
await deployProxy("L2GasPriceOracle", admin.address, []),
await deployProxy("L2GasPriceOracle", admin.address),
deployer
);
gateway = await ethers.getContractAt(
"EnforcedTxGateway",
await deployProxy("EnforcedTxGateway", admin.address),
deployer
);
@@ -56,13 +52,7 @@ describe("EnforcedTxGateway.spec", async () => {
caller = await MockCaller.deploy();
await caller.deployed();
await queue.initialize(
constants.AddressZero,
constants.AddressZero,
constants.AddressZero,
oracle.address,
10000000
);
await queue.initialize(constants.AddressZero, constants.AddressZero, gateway.address, oracle.address, 10000000);
await gateway.initialize(queue.address, feeVault.address);
await oracle.initialize(21000, 51000, 8, 16);

View File

@@ -1,667 +0,0 @@
/* eslint-disable node/no-missing-import */
/* eslint-disable node/no-unpublished-import */
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
import { expect } from "chai";
import { BigNumber, BigNumberish, ContractTransaction, constants } from "ethers";
import { keccak256 } from "ethers/lib/utils";
import { ethers, network } from "hardhat";
import {
ProxyAdmin,
L1GatewayRouter,
L2ScrollMessenger,
L1ScrollMessenger,
L1MessageQueueWithGasPriceOracle,
L2GatewayRouter,
} from "../typechain";
describe("GasOptimizationUpgrade.spec", async () => {
const L1_ROUTER = "0xF8B1378579659D8F7EE5f3C929c2f3E332E41Fd6";
const L2_ROUTER = "0x4C0926FF5252A435FD19e10ED15e5a249Ba19d79";
const L1_MESSENGER = "0x6774Bcbd5ceCeF1336b5300fb5186a12DDD8b367";
const L2_MESSENGER = "0x781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC";
const L1_MESSAGE_QUEUE = "0x0d7E906BD9cAFa154b048cFa766Cc1E54E39AF9B";
const L2_MESSAGE_QUEUE = "0x5300000000000000000000000000000000000000";
const SCROLL_CHAIN = "0xa13BAF47339d63B743e7Da8741db5456DAc1E556";
let deployer: SignerWithAddress;
let proxyAdmin: ProxyAdmin;
const mockERC20Balance = async (tokenAddress: string, balance: BigNumber, slot: BigNumberish) => {
const storageSlot = keccak256(
ethers.utils.defaultAbiCoder.encode(["address", "uint256"], [deployer.address, slot])
);
await ethers.provider.send("hardhat_setStorageAt", [
tokenAddress,
storageSlot,
ethers.utils.hexlify(ethers.utils.zeroPad(balance.toHexString(), 32)),
]);
const token = await ethers.getContractAt("MockERC20", tokenAddress, deployer);
expect(await token.balanceOf(deployer.address)).to.eq(balance);
};
const mockETHBalance = async (balance: BigNumber) => {
await network.provider.send("hardhat_setBalance", [deployer.address, balance.toHexString()]);
expect(await deployer.getBalance()).to.eq(balance);
};
const showGasUsage = async (tx: ContractTransaction, desc: string) => {
const receipt = await tx.wait();
console.log(`${desc}: GasUsed[${receipt.gasUsed}]`);
};
context("L1 upgrade", async () => {
let forkBlock: number;
let router: L1GatewayRouter;
let messenger: L1ScrollMessenger;
let queue: L1MessageQueueWithGasPriceOracle;
beforeEach(async () => {
// fork network
const provider = new ethers.providers.JsonRpcProvider("https://rpc.ankr.com/eth");
if (!forkBlock) {
forkBlock = (await provider.getBlockNumber()) - 10;
}
await network.provider.request({
method: "hardhat_reset",
params: [
{
forking: {
jsonRpcUrl: "https://rpc.ankr.com/eth",
blockNumber: forkBlock,
},
},
],
});
await network.provider.request({
method: "hardhat_impersonateAccount",
params: ["0x1100000000000000000000000000000000000011"],
});
// mock eth balance
deployer = await ethers.getSigner("0x1100000000000000000000000000000000000011");
await mockETHBalance(ethers.utils.parseEther("1000"));
// mock owner of proxy admin
proxyAdmin = await ethers.getContractAt("ProxyAdmin", "0xEB803eb3F501998126bf37bB823646Ed3D59d072", deployer);
await ethers.provider.send("hardhat_setStorageAt", [
proxyAdmin.address,
"0x0",
ethers.utils.hexlify(ethers.utils.zeroPad(deployer.address, 32)),
]);
expect(await proxyAdmin.owner()).to.eq(deployer.address);
router = await ethers.getContractAt("L1GatewayRouter", L1_ROUTER, deployer);
messenger = await ethers.getContractAt("L1ScrollMessenger", L1_MESSENGER, deployer);
queue = await ethers.getContractAt("L1MessageQueueWithGasPriceOracle", L1_MESSAGE_QUEUE, deployer);
});
const upgradeL1 = async (proxy: string, impl: string) => {
await proxyAdmin.upgrade(proxy, impl);
const L1ScrollMessenger = await ethers.getContractFactory("L1ScrollMessenger", deployer);
const L1MessageQueueWithGasPriceOracle = await ethers.getContractFactory(
"L1MessageQueueWithGasPriceOracle",
deployer
);
const ScrollChain = await ethers.getContractFactory("ScrollChain", deployer);
await proxyAdmin.upgrade(
L1_MESSENGER,
(
await L1ScrollMessenger.deploy(L2_MESSENGER, SCROLL_CHAIN, L1_MESSAGE_QUEUE)
).address
);
await proxyAdmin.upgrade(
L1_MESSAGE_QUEUE,
(
await L1MessageQueueWithGasPriceOracle.deploy(
L1_MESSENGER,
SCROLL_CHAIN,
"0x72CAcBcfDe2d1e19122F8A36a4d6676cd39d7A5d"
)
).address
);
await queue.initializeV2();
await proxyAdmin.upgrade(
SCROLL_CHAIN,
(
await ScrollChain.deploy(534352, L1_MESSAGE_QUEUE, "0xA2Ab526e5C5491F10FC05A55F064BF9F7CEf32a0")
).address
);
};
it.skip("should succeed on L1ETHGateway", async () => {
const L1_GATEWAY = "0x7F2b8C31F88B6006c382775eea88297Ec1e3E905";
const L2_GATEWAY = "0x6EA73e05AdC79974B931123675ea8F78FfdacDF0";
const L1ETHGateway = await ethers.getContractFactory("L1ETHGateway", deployer);
const impl = await L1ETHGateway.deploy(L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
const gateway = await ethers.getContractAt("L1ETHGateway", L1_GATEWAY, deployer);
const amountIn = ethers.utils.parseEther("1");
const fee = await queue.estimateCrossDomainMessageFee(1e6);
// before upgrade
await showGasUsage(
await gateway["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
"L1ETHGateway.depositETH before upgrade"
);
await showGasUsage(
await router["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
"L1GatewayRouter.depositETH before upgrade"
);
await showGasUsage(
await messenger["sendMessage(address,uint256,bytes,uint256)"](deployer.address, amountIn, "0x", 1e6, {
value: amountIn.add(fee),
}),
"L1ScrollMessenger.sendMessage before upgrade"
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
// after upgrade
await showGasUsage(
await gateway["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
"L1ETHGateway.depositETH after upgrade"
);
await showGasUsage(
await router["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
"L1GatewayRouter.depositETH after upgrade"
);
await showGasUsage(
await messenger["sendMessage(address,uint256,bytes,uint256)"](deployer.address, amountIn, "0x", 1e6, {
value: amountIn.add(fee),
}),
"L1ScrollMessenger.sendMessage after upgrade"
);
});
it.skip("should succeed on L1WETHGateway", async () => {
const L1_WETH = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2";
const L2_WETH = "0x5300000000000000000000000000000000000004";
const L1_GATEWAY = "0x7AC440cAe8EB6328de4fA621163a792c1EA9D4fE";
const L2_GATEWAY = "0x7003E7B7186f0E6601203b99F7B8DECBfA391cf9";
const L1WETHGateway = await ethers.getContractFactory("L1WETHGateway", deployer);
const impl = await L1WETHGateway.deploy(L1_WETH, L2_WETH, L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
const gateway = await ethers.getContractAt("L1WETHGateway", L1_GATEWAY, deployer);
const amountIn = ethers.utils.parseEther("1");
const fee = await queue.estimateCrossDomainMessageFee(1e6);
const token = await ethers.getContractAt("MockERC20", L1_WETH, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 3);
await token.approve(L1_GATEWAY, constants.MaxUint256);
await token.approve(L1_ROUTER, constants.MaxUint256);
// before upgrade
await showGasUsage(
await gateway["depositERC20(address,uint256,uint256)"](L1_WETH, amountIn, 1e6, { value: fee }),
"L1WETHGateway.depositERC20 WETH before upgrade"
);
await showGasUsage(
await router["depositERC20(address,uint256,uint256)"](L1_WETH, amountIn, 1e6, { value: fee }),
"L1GatewayRouter.depositERC20 WETH before upgrade"
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
// after upgrade
await showGasUsage(
await gateway["depositERC20(address,uint256,uint256)"](L1_WETH, amountIn, 1e6, { value: fee }),
"L1WETHGateway.depositERC20 WETH after upgrade"
);
await showGasUsage(
await router["depositERC20(address,uint256,uint256)"](L1_WETH, amountIn, 1e6, { value: fee }),
"L1GatewayRouter.depositERC20 WETH after upgrade"
);
});
it.skip("should succeed on L1StandardERC20Gateway", async () => {
const L1_USDT = "0xdAC17F958D2ee523a2206206994597C13D831ec7";
const L1_GATEWAY = "0xD8A791fE2bE73eb6E6cF1eb0cb3F36adC9B3F8f9";
const L2_GATEWAY = "0xE2b4795039517653c5Ae8C2A9BFdd783b48f447A";
const L1StandardERC20Gateway = await ethers.getContractFactory("L1StandardERC20Gateway", deployer);
const impl = await L1StandardERC20Gateway.deploy(
L2_GATEWAY,
L1_ROUTER,
L1_MESSENGER,
"0xC7d86908ccf644Db7C69437D5852CedBC1aD3f69",
"0x66e5312EDeEAef6e80759A0F789e7914Fb401484"
);
const gateway = await ethers.getContractAt("L1StandardERC20Gateway", L1_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 6);
const fee = await queue.estimateCrossDomainMessageFee(1e6);
const token = await ethers.getContractAt("MockERC20", L1_USDT, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 2);
await token.approve(L1_GATEWAY, constants.MaxUint256);
await token.approve(L1_ROUTER, constants.MaxUint256);
// before upgrade
await showGasUsage(
await gateway["depositERC20(address,uint256,uint256)"](L1_USDT, amountIn, 1e6, { value: fee }),
"L1StandardERC20Gateway.depositERC20 USDT before upgrade"
);
await showGasUsage(
await router["depositERC20(address,uint256,uint256)"](L1_USDT, amountIn, 1e6, { value: fee }),
"L1GatewayRouter.depositERC20 USDT before upgrade"
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
// after upgrade
await showGasUsage(
await gateway["depositERC20(address,uint256,uint256)"](L1_USDT, amountIn, 1e6, { value: fee }),
"L1StandardERC20Gateway.depositERC20 USDT after upgrade"
);
await showGasUsage(
await router["depositERC20(address,uint256,uint256)"](L1_USDT, amountIn, 1e6, { value: fee }),
"L1GatewayRouter.depositERC20 USDT after upgrade"
);
});
it.skip("should succeed on L1CustomERC20Gateway", async () => {
const L1_DAI = "0x6B175474E89094C44Da98b954EedeAC495271d0F";
const L1_GATEWAY = "0x67260A8B73C5B77B55c1805218A42A7A6F98F515";
const L2_GATEWAY = "0xaC78dff3A87b5b534e366A93E785a0ce8fA6Cc62";
const L1CustomERC20Gateway = await ethers.getContractFactory("L1CustomERC20Gateway", deployer);
const impl = await L1CustomERC20Gateway.deploy(L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
const gateway = await ethers.getContractAt("L1CustomERC20Gateway", L1_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 18);
const fee = await queue.estimateCrossDomainMessageFee(1e6);
const token = await ethers.getContractAt("MockERC20", L1_DAI, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 2);
await token.approve(L1_GATEWAY, constants.MaxUint256);
await token.approve(L1_ROUTER, constants.MaxUint256);
// before upgrade
await showGasUsage(
await gateway["depositERC20(address,uint256,uint256)"](L1_DAI, amountIn, 1e6, { value: fee }),
"L1CustomERC20Gateway.depositERC20 DAI before upgrade"
);
await showGasUsage(
await router["depositERC20(address,uint256,uint256)"](L1_DAI, amountIn, 1e6, { value: fee }),
"L1GatewayRouter.depositERC20 DAI before upgrade"
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
// after upgrade
await showGasUsage(
await gateway["depositERC20(address,uint256,uint256)"](L1_DAI, amountIn, 1e6, { value: fee }),
"L1CustomERC20Gateway.depositERC20 DAI after upgrade"
);
await showGasUsage(
await router["depositERC20(address,uint256,uint256)"](L1_DAI, amountIn, 1e6, { value: fee }),
"L1GatewayRouter.depositERC20 DAI after upgrade"
);
});
// Gas benchmark for L1USDCGateway: measure depositERC20 cost through the
// gateway and through the router, both before and after upgrading the live
// proxy to a freshly deployed implementation.
it.skip("should succeed on L1USDCGateway", async () => {
  const L1_USDC = "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48";
  const L2_USDC = "0x06eFdBFf2a14a7c8E15944D1F4A48F9F95F663A4";
  const L1_GATEWAY = "0xf1AF3b23DE0A5Ca3CAb7261cb0061C0D779A5c7B";
  const L2_GATEWAY = "0x33B60d5Dd260d453cAC3782b0bDC01ce84672142";
  // Deploy the candidate implementation and attach to the deployed proxy.
  const gatewayFactory = await ethers.getContractFactory("L1USDCGateway", deployer);
  const nextImpl = await gatewayFactory.deploy(L1_USDC, L2_USDC, L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
  const usdcGateway = await ethers.getContractAt("L1USDCGateway", L1_GATEWAY, deployer);
  const depositAmount = ethers.utils.parseUnits("1", 6);
  const messageFee = await queue.estimateCrossDomainMessageFee(1e6);
  // Fund the deployer with USDC and approve both possible spenders.
  const usdc = await ethers.getContractAt("MockERC20", L1_USDC, deployer);
  await mockERC20Balance(usdc.address, depositAmount.mul(10), 9);
  await usdc.approve(L1_GATEWAY, constants.MaxUint256);
  await usdc.approve(L1_ROUTER, constants.MaxUint256);
  // before upgrade
  await showGasUsage(
    await usdcGateway["depositERC20(address,uint256,uint256)"](L1_USDC, depositAmount, 1e6, { value: messageFee }),
    "L1USDCGateway.depositERC20 USDC before upgrade"
  );
  await showGasUsage(
    await router["depositERC20(address,uint256,uint256)"](L1_USDC, depositAmount, 1e6, { value: messageFee }),
    "L1GatewayRouter.depositERC20 USDC before upgrade"
  );
  // do upgrade
  await upgradeL1(L1_GATEWAY, nextImpl.address);
  // after upgrade
  await showGasUsage(
    await usdcGateway["depositERC20(address,uint256,uint256)"](L1_USDC, depositAmount, 1e6, { value: messageFee }),
    "L1USDCGateway.depositERC20 USDC after upgrade"
  );
  await showGasUsage(
    await router["depositERC20(address,uint256,uint256)"](L1_USDC, depositAmount, 1e6, { value: messageFee }),
    "L1GatewayRouter.depositERC20 USDC after upgrade"
  );
});
// Gas benchmark for L1LidoGateway: measure depositERC20 of wstETH through the
// gateway and through the router, before and after upgrading the proxy and
// re-initializing the v2 implementation.
it.skip("should succeed on L1LidoGateway", async () => {
const L1_WSTETH = "0x7f39C581F595B53c5cb19bD0b3f8dA6c935E2Ca0";
const L2_WSTETH = "0xf610A9dfB7C89644979b4A0f27063E9e7d7Cda32";
const L1_GATEWAY = "0x6625C6332c9F91F2D27c304E729B86db87A3f504";
const L2_GATEWAY = "0x8aE8f22226B9d789A36AC81474e633f8bE2856c9";
// Deploy the candidate implementation and attach to the live proxy.
const L1LidoGateway = await ethers.getContractFactory("L1LidoGateway", deployer);
const impl = await L1LidoGateway.deploy(L1_WSTETH, L2_WSTETH, L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
const gateway = await ethers.getContractAt("L1LidoGateway", L1_GATEWAY, deployer);
// NOTE(review): wstETH has 18 decimals, so parseUnits("1", 6) is 1e-12 wstETH.
// Confirm this dust amount is intentional for the gas measurement.
const amountIn = ethers.utils.parseUnits("1", 6);
const fee = await queue.estimateCrossDomainMessageFee(1e6);
// Fund the deployer with wstETH (third arg presumably the balance mapping
// slot — confirm against mockERC20Balance) and approve both spenders.
const token = await ethers.getContractAt("MockERC20", L1_WSTETH, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 0);
await token.approve(L1_GATEWAY, constants.MaxUint256);
await token.approve(L1_ROUTER, constants.MaxUint256);
// before upgrade
await showGasUsage(
await gateway["depositERC20(address,uint256,uint256)"](L1_WSTETH, amountIn, 1e6, { value: fee }),
"L1LidoGateway.depositERC20 wstETH before upgrade"
);
await showGasUsage(
await router["depositERC20(address,uint256,uint256)"](L1_WSTETH, amountIn, 1e6, { value: fee }),
"L1GatewayRouter.depositERC20 wstETH before upgrade"
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
// NOTE(review): all four initializeV2 roles are set to the deployer here —
// presumably role/fee parameters; verify against L1LidoGateway.initializeV2.
await gateway.initializeV2(deployer.address, deployer.address, deployer.address, deployer.address);
// after upgrade
await showGasUsage(
await gateway["depositERC20(address,uint256,uint256)"](L1_WSTETH, amountIn, 1e6, { value: fee }),
"L1LidoGateway.depositERC20 wstETH after upgrade"
);
await showGasUsage(
await router["depositERC20(address,uint256,uint256)"](L1_WSTETH, amountIn, 1e6, { value: fee }),
"L1GatewayRouter.depositERC20 wstETH after upgrade"
);
});
});
context("L2 upgrade", async () => {
let forkBlock: number;
let router: L2GatewayRouter;
let messenger: L2ScrollMessenger;
// Re-fork Scroll L2 mainnet before every test and take control of the
// accounts and the ProxyAdmin the tests need.
beforeEach(async () => {
// fork network
// Pin the fork block once (31 blocks behind the current tip) and reuse it for
// every test in this context so all cases run against the same state.
const provider = new ethers.providers.JsonRpcProvider("https://rpc.scroll.io");
if (!forkBlock) {
forkBlock = (await provider.getBlockNumber()) - 31;
}
await network.provider.request({
method: "hardhat_reset",
params: [
{
forking: {
jsonRpcUrl: "https://rpc.scroll.io",
blockNumber: forkBlock,
},
},
],
});
// Impersonate a fixed address to act as the test "deployer"/sender.
await network.provider.request({
method: "hardhat_impersonateAccount",
params: ["0x1100000000000000000000000000000000000011"],
});
// mock eth balance
deployer = await ethers.getSigner("0x1100000000000000000000000000000000000011");
await mockETHBalance(ethers.utils.parseEther("1000"));
// mock owner of proxy admin
// Overwrite storage slot 0 of the on-chain ProxyAdmin with the deployer's
// address so the tests can perform upgrades; the expect below confirms that
// slot 0 is indeed the Ownable owner slot.
proxyAdmin = await ethers.getContractAt("ProxyAdmin", "0xA76acF000C890b0DD7AEEf57627d9899F955d026", deployer);
await ethers.provider.send("hardhat_setStorageAt", [
proxyAdmin.address,
"0x0",
ethers.utils.hexlify(ethers.utils.zeroPad(deployer.address, 32)),
]);
expect(await proxyAdmin.owner()).to.eq(deployer.address);
router = await ethers.getContractAt("L2GatewayRouter", L2_ROUTER, deployer);
messenger = await ethers.getContractAt("L2ScrollMessenger", L2_MESSENGER, deployer);
});
// Upgrade the given gateway proxy to `impl`, and additionally deploy a fresh
// L2ScrollMessenger implementation and upgrade the messenger proxy too, so
// post-upgrade gas numbers reflect new code on both contracts. Note this
// redeploys the messenger implementation on every call.
const upgradeL2 = async (proxy: string, impl: string) => {
await proxyAdmin.upgrade(proxy, impl);
const L2ScrollMessenger = await ethers.getContractFactory("L2ScrollMessenger", deployer);
await proxyAdmin.upgrade(L2_MESSENGER, (await L2ScrollMessenger.deploy(L1_MESSENGER, L2_MESSAGE_QUEUE)).address);
};
// Gas benchmark for L2ETHGateway: measure withdrawETH through the gateway,
// through the router, and a raw messenger sendMessage, before and after
// upgrading the proxy to a freshly deployed implementation.
it.skip("should succeed on L2ETHGateway", async () => {
  const L1_GATEWAY = "0x7F2b8C31F88B6006c382775eea88297Ec1e3E905";
  const L2_GATEWAY = "0x6EA73e05AdC79974B931123675ea8F78FfdacDF0";
  // Deploy the candidate implementation and attach to the live proxy.
  const gatewayFactory = await ethers.getContractFactory("L2ETHGateway", deployer);
  const nextImpl = await gatewayFactory.deploy(L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
  const ethGateway = await ethers.getContractAt("L2ETHGateway", L2_GATEWAY, deployer);
  const withdrawAmount = ethers.utils.parseEther("1");
  // before upgrade
  await showGasUsage(
    await ethGateway["withdrawETH(uint256,uint256)"](withdrawAmount, 1e6, { value: withdrawAmount }),
    "L2ETHGateway.withdrawETH before upgrade"
  );
  await showGasUsage(
    await router["withdrawETH(uint256,uint256)"](withdrawAmount, 1e6, { value: withdrawAmount }),
    "L2GatewayRouter.withdrawETH before upgrade"
  );
  await showGasUsage(
    await messenger["sendMessage(address,uint256,bytes,uint256)"](deployer.address, withdrawAmount, "0x", 1e6, {
      value: withdrawAmount,
    }),
    "L2ScrollMessenger.sendMessage before upgrade"
  );
  // do upgrade
  await upgradeL2(L2_GATEWAY, nextImpl.address);
  // after upgrade
  await showGasUsage(
    await ethGateway["withdrawETH(uint256,uint256)"](withdrawAmount, 1e6, { value: withdrawAmount }),
    "L2ETHGateway.withdrawETH after upgrade"
  );
  await showGasUsage(
    await router["withdrawETH(uint256,uint256)"](withdrawAmount, 1e6, { value: withdrawAmount }),
    "L2GatewayRouter.withdrawETH after upgrade"
  );
  await showGasUsage(
    await messenger["sendMessage(address,uint256,bytes,uint256)"](deployer.address, withdrawAmount, "0x", 1e6, {
      value: withdrawAmount,
    }),
    "L2ScrollMessenger.sendMessage after upgrade"
  );
});
// Gas benchmark for L2WETHGateway: measure withdrawERC20 of WETH through the
// gateway and through the router, before and after upgrading the proxy.
it.skip("should succeed on L2WETHGateway", async () => {
const L1_WETH = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2";
const L2_WETH = "0x5300000000000000000000000000000000000004";
const L1_GATEWAY = "0x7AC440cAe8EB6328de4fA621163a792c1EA9D4fE";
const L2_GATEWAY = "0x7003E7B7186f0E6601203b99F7B8DECBfA391cf9";
// Deploy the candidate implementation and attach to the live proxy.
const L2WETHGateway = await ethers.getContractFactory("L2WETHGateway", deployer);
const impl = await L2WETHGateway.deploy(L2_WETH, L1_WETH, L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
const gateway = await ethers.getContractAt("L2WETHGateway", L2_GATEWAY, deployer);
const amountIn = ethers.utils.parseEther("1");
// Fund the deployer with WETH (third arg presumably the balance mapping
// slot — confirm against mockERC20Balance) and approve both spenders.
const token = await ethers.getContractAt("MockERC20", L2_WETH, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 0);
await token.approve(L2_GATEWAY, constants.MaxUint256);
await token.approve(L2_ROUTER, constants.MaxUint256);
// before upgrade
await showGasUsage(
await gateway["withdrawERC20(address,uint256,uint256)"](L2_WETH, amountIn, 1e6),
"L2WETHGateway.withdrawERC20 WETH before upgrade"
);
await showGasUsage(
await router["withdrawERC20(address,uint256,uint256)"](L2_WETH, amountIn, 1e6),
"L2GatewayRouter.withdrawERC20 WETH before upgrade"
);
// do upgrade
await upgradeL2(L2_GATEWAY, impl.address);
// after upgrade
await showGasUsage(
await gateway["withdrawERC20(address,uint256,uint256)"](L2_WETH, amountIn, 1e6),
"L2WETHGateway.withdrawERC20 WETH after upgrade"
);
await showGasUsage(
await router["withdrawERC20(address,uint256,uint256)"](L2_WETH, amountIn, 1e6),
"L2GatewayRouter.withdrawERC20 WETH after upgrade"
);
});
// Gas benchmark for L2StandardERC20Gateway: measure withdrawERC20 of USDT
// through the gateway and through the router, before and after upgrading the
// proxy.
it.skip("should succeed on L2StandardERC20Gateway", async () => {
const L2_USDT = "0xf55BEC9cafDbE8730f096Aa55dad6D22d44099Df";
const L1_GATEWAY = "0xD8A791fE2bE73eb6E6cF1eb0cb3F36adC9B3F8f9";
const L2_GATEWAY = "0xE2b4795039517653c5Ae8C2A9BFdd783b48f447A";
// Deploy the candidate implementation and attach to the live proxy.
const L2StandardERC20Gateway = await ethers.getContractFactory("L2StandardERC20Gateway", deployer);
// NOTE(review): the hard-coded fourth constructor arg is presumably the
// deployed ScrollStandardERC20Factory address — confirm against deployments.
const impl = await L2StandardERC20Gateway.deploy(
L1_GATEWAY,
L2_ROUTER,
L2_MESSENGER,
"0x66e5312EDeEAef6e80759A0F789e7914Fb401484"
);
const gateway = await ethers.getContractAt("L2StandardERC20Gateway", L2_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 6);
// Fund the deployer with USDT (third arg presumably the balance mapping
// slot — confirm against mockERC20Balance) and approve both spenders.
const token = await ethers.getContractAt("MockERC20", L2_USDT, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 51);
await token.approve(L2_GATEWAY, constants.MaxUint256);
await token.approve(L2_ROUTER, constants.MaxUint256);
// before upgrade
await showGasUsage(
await gateway["withdrawERC20(address,uint256,uint256)"](L2_USDT, amountIn, 1e6),
"L2StandardERC20Gateway.withdrawERC20 USDT before upgrade"
);
await showGasUsage(
await router["withdrawERC20(address,uint256,uint256)"](L2_USDT, amountIn, 1e6),
"L2GatewayRouter.withdrawERC20 USDT before upgrade"
);
// do upgrade
await upgradeL2(L2_GATEWAY, impl.address);
// after upgrade
await showGasUsage(
await gateway["withdrawERC20(address,uint256,uint256)"](L2_USDT, amountIn, 1e6),
"L2StandardERC20Gateway.withdrawERC20 USDT after upgrade"
);
await showGasUsage(
await router["withdrawERC20(address,uint256,uint256)"](L2_USDT, amountIn, 1e6),
"L2GatewayRouter.withdrawERC20 USDT after upgrade"
);
});
// Gas benchmark for L2CustomERC20Gateway: measure withdrawERC20 of DAI
// through the gateway and through the router, before and after upgrading the
// proxy to a freshly deployed implementation.
it.skip("should succeed on L2CustomERC20Gateway", async () => {
  const L2_DAI = "0xcA77eB3fEFe3725Dc33bccB54eDEFc3D9f764f97";
  const L1_GATEWAY = "0x67260A8B73C5B77B55c1805218A42A7A6F98F515";
  const L2_GATEWAY = "0xaC78dff3A87b5b534e366A93E785a0ce8fA6Cc62";
  // Deploy the candidate implementation and attach to the live proxy.
  const L2CustomERC20Gateway = await ethers.getContractFactory("L2CustomERC20Gateway", deployer);
  const impl = await L2CustomERC20Gateway.deploy(L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
  const gateway = await ethers.getContractAt("L2CustomERC20Gateway", L2_GATEWAY, deployer);
  const amountIn = ethers.utils.parseUnits("1", 18);
  // Fund the deployer with DAI (third arg presumably the balance mapping
  // slot — confirm against mockERC20Balance) and approve both spenders.
  const token = await ethers.getContractAt("MockERC20", L2_DAI, deployer);
  await mockERC20Balance(token.address, amountIn.mul(10), 51);
  // BUG FIX: the approvals previously targeted L1_GATEWAY and L1_ROUTER, but
  // this test runs on an L2 fork and withdraws through the L2 gateway and the
  // L2 router, which spend the deployer's DAI allowance. Approving the L1
  // addresses leaves the actual spenders with zero allowance; approve
  // L2_GATEWAY / L2_ROUTER, matching every other L2 test in this context.
  await token.approve(L2_GATEWAY, constants.MaxUint256);
  await token.approve(L2_ROUTER, constants.MaxUint256);
  // before upgrade
  await showGasUsage(
    await gateway["withdrawERC20(address,uint256,uint256)"](L2_DAI, amountIn, 1e6),
    "L2CustomERC20Gateway.withdrawERC20 DAI before upgrade"
  );
  await showGasUsage(
    await router["withdrawERC20(address,uint256,uint256)"](L2_DAI, amountIn, 1e6),
    "L2GatewayRouter.withdrawERC20 DAI before upgrade"
  );
  // do upgrade
  await upgradeL2(L2_GATEWAY, impl.address);
  // after upgrade
  await showGasUsage(
    await gateway["withdrawERC20(address,uint256,uint256)"](L2_DAI, amountIn, 1e6),
    "L2CustomERC20Gateway.withdrawERC20 DAI after upgrade"
  );
  await showGasUsage(
    await router["withdrawERC20(address,uint256,uint256)"](L2_DAI, amountIn, 1e6),
    "L2GatewayRouter.withdrawERC20 DAI after upgrade"
  );
});
// Gas benchmark for L2USDCGateway: measure withdrawERC20 of USDC through the
// gateway and through the router, before and after upgrading the live proxy
// to a freshly deployed implementation.
it.skip("should succeed on L2USDCGateway", async () => {
  const L1_USDC = "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48";
  const L2_USDC = "0x06eFdBFf2a14a7c8E15944D1F4A48F9F95F663A4";
  const L1_GATEWAY = "0xf1AF3b23DE0A5Ca3CAb7261cb0061C0D779A5c7B";
  const L2_GATEWAY = "0x33B60d5Dd260d453cAC3782b0bDC01ce84672142";
  // Deploy the candidate implementation and attach to the live proxy.
  const gatewayFactory = await ethers.getContractFactory("L2USDCGateway", deployer);
  const nextImpl = await gatewayFactory.deploy(L1_USDC, L2_USDC, L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
  const usdcGateway = await ethers.getContractAt("L2USDCGateway", L2_GATEWAY, deployer);
  const withdrawAmount = ethers.utils.parseUnits("1", 6);
  // Fund the deployer with USDC and approve both possible spenders.
  const usdc = await ethers.getContractAt("MockERC20", L2_USDC, deployer);
  await mockERC20Balance(usdc.address, withdrawAmount.mul(10), 9);
  await usdc.approve(L2_GATEWAY, constants.MaxUint256);
  await usdc.approve(L2_ROUTER, constants.MaxUint256);
  // before upgrade
  await showGasUsage(
    await usdcGateway["withdrawERC20(address,uint256,uint256)"](L2_USDC, withdrawAmount, 1e6),
    "L2USDCGateway.withdrawERC20 USDC before upgrade"
  );
  await showGasUsage(
    await router["withdrawERC20(address,uint256,uint256)"](L2_USDC, withdrawAmount, 1e6),
    "L2GatewayRouter.withdrawERC20 USDC before upgrade"
  );
  // do upgrade
  await upgradeL2(L2_GATEWAY, nextImpl.address);
  // after upgrade
  await showGasUsage(
    await usdcGateway["withdrawERC20(address,uint256,uint256)"](L2_USDC, withdrawAmount, 1e6),
    "L2USDCGateway.withdrawERC20 USDC after upgrade"
  );
  await showGasUsage(
    await router["withdrawERC20(address,uint256,uint256)"](L2_USDC, withdrawAmount, 1e6),
    "L2GatewayRouter.withdrawERC20 USDC after upgrade"
  );
});
// Gas benchmark for L2LidoGateway: measure withdrawERC20 of wstETH through
// the gateway and through the router, before and after upgrading the proxy
// and re-initializing the v2 implementation.
it.skip("should succeed on L2LidoGateway", async () => {
const L1_WSTETH = "0x7f39C581F595B53c5cb19bD0b3f8dA6c935E2Ca0";
const L2_WSTETH = "0xf610A9dfB7C89644979b4A0f27063E9e7d7Cda32";
const L1_GATEWAY = "0x6625C6332c9F91F2D27c304E729B86db87A3f504";
const L2_GATEWAY = "0x8aE8f22226B9d789A36AC81474e633f8bE2856c9";
// Deploy the candidate implementation and attach to the live proxy.
const L2LidoGateway = await ethers.getContractFactory("L2LidoGateway", deployer);
const impl = await L2LidoGateway.deploy(L1_WSTETH, L2_WSTETH, L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
const gateway = await ethers.getContractAt("L2LidoGateway", L2_GATEWAY, deployer);
// NOTE(review): wstETH has 18 decimals, so parseUnits("1", 6) is 1e-12 wstETH.
// Confirm this dust amount is intentional for the gas measurement.
const amountIn = ethers.utils.parseUnits("1", 6);
// Fund the deployer with wstETH (third arg presumably the balance mapping
// slot — confirm against mockERC20Balance) and approve both spenders.
const token = await ethers.getContractAt("MockERC20", L2_WSTETH, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 51);
await token.approve(L2_GATEWAY, constants.MaxUint256);
await token.approve(L2_ROUTER, constants.MaxUint256);
// before upgrade
await showGasUsage(
await gateway["withdrawERC20(address,uint256,uint256)"](L2_WSTETH, amountIn, 1e6),
"L2LidoGateway.withdrawERC20 wstETH before upgrade"
);
await showGasUsage(
await router["withdrawERC20(address,uint256,uint256)"](L2_WSTETH, amountIn, 1e6),
"L2GatewayRouter.withdrawERC20 wstETH before upgrade"
);
// do upgrade
await upgradeL2(L2_GATEWAY, impl.address);
// NOTE(review): all four initializeV2 roles are set to the deployer here —
// presumably role/fee parameters; verify against L2LidoGateway.initializeV2.
await gateway.initializeV2(deployer.address, deployer.address, deployer.address, deployer.address);
// after upgrade
await showGasUsage(
await gateway["withdrawERC20(address,uint256,uint256)"](L2_WSTETH, amountIn, 1e6),
"L2LidoGateway.withdrawERC20 wstETH after upgrade"
);
await showGasUsage(
await router["withdrawERC20(address,uint256,uint256)"](L2_WSTETH, amountIn, 1e6),
"L2GatewayRouter.withdrawERC20 wstETH after upgrade"
);
});
});
});

View File

@@ -17,10 +17,10 @@ describe("L1MessageQueue", async () => {
let oracle: L2GasPriceOracle;
let queue: L1MessageQueue;
const deployProxy = async (name: string, admin: string, args: any[]): Promise<string> => {
const deployProxy = async (name: string, admin: string): Promise<string> => {
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
const Factory = await ethers.getContractFactory(name, deployer);
const impl = args.length > 0 ? await Factory.deploy(...args) : await Factory.deploy();
const impl = await Factory.deploy();
await impl.deployed();
const proxy = await TransparentUpgradeableProxy.deploy(impl.address, admin, "0x");
await proxy.deployed();
@@ -34,20 +34,16 @@ describe("L1MessageQueue", async () => {
const admin = await ProxyAdmin.deploy();
await admin.deployed();
queue = await ethers.getContractAt(
"L1MessageQueue",
await deployProxy("L1MessageQueue", admin.address, [messenger.address, scrollChain.address, gateway.address]),
deployer
);
queue = await ethers.getContractAt("L1MessageQueue", await deployProxy("L1MessageQueue", admin.address), deployer);
oracle = await ethers.getContractAt(
"L2GasPriceOracle",
await deployProxy("L2GasPriceOracle", admin.address, []),
await deployProxy("L2GasPriceOracle", admin.address),
deployer
);
await oracle.initialize(21000, 50000, 8, 16);
await queue.initialize(messenger.address, scrollChain.address, constants.AddressZero, oracle.address, 10000000);
await queue.initialize(messenger.address, scrollChain.address, gateway.address, oracle.address, 10000000);
});
context("auth", async () => {
@@ -82,6 +78,22 @@ describe("L1MessageQueue", async () => {
});
});
context("#updateEnforcedTxGateway", async () => {
it("should revert, when non-owner call", async () => {
await expect(queue.connect(signer).updateEnforcedTxGateway(constants.AddressZero)).to.revertedWith(
"Ownable: caller is not the owner"
);
});
it("should succeed", async () => {
expect(await queue.enforcedTxGateway()).to.eq(gateway.address);
await expect(queue.updateEnforcedTxGateway(deployer.address))
.to.emit(queue, "UpdateEnforcedTxGateway")
.withArgs(gateway.address, deployer.address);
expect(await queue.enforcedTxGateway()).to.eq(deployer.address);
});
});
context("#updateMaxGasLimit", async () => {
it("should revert, when non-owner call", async () => {
await expect(queue.connect(signer).updateMaxGasLimit(0)).to.revertedWith("Ownable: caller is not the owner");

View File

@@ -1,97 +0,0 @@
/* eslint-disable node/no-missing-import */
/* eslint-disable node/no-unpublished-import */
import { expect } from "chai";
import { randomBytes } from "crypto";
import { BigNumber, Contract } from "ethers";
import { ethers } from "hardhat";
import fs from "fs";
import PoseidonWithoutDomain from "circomlib/src/poseidon_gencontract";
import { generateABI, createCode } from "../scripts/poseidon";
describe("PoseidonHash.spec", async () => {
// test against with circomlib's implementation.
context("domain = zero", async () => {
let poseidonCircom: Contract;
let poseidon: Contract;
beforeEach(async () => {
const [deployer] = await ethers.getSigners();
const PoseidonWithoutDomainFactory = new ethers.ContractFactory(
PoseidonWithoutDomain.generateABI(2),
PoseidonWithoutDomain.createCode(2),
deployer
);
poseidonCircom = await PoseidonWithoutDomainFactory.deploy();
await poseidonCircom.deployed();
const PoseidonWithDomainFactory = new ethers.ContractFactory(generateABI(2), createCode(2), deployer);
poseidon = await PoseidonWithDomainFactory.deploy();
await poseidon.deployed();
});
it("should succeed on zero inputs", async () => {
expect(await poseidonCircom["poseidon(uint256[2])"]([0, 0])).to.eq(
await poseidon["poseidon(uint256[2],uint256)"]([0, 0], 0)
);
});
it("should succeed on random inputs", async () => {
for (let bytes = 1; bytes <= 32; ++bytes) {
for (let i = 0; i < 5; ++i) {
const a = randomBytes(bytes);
const b = randomBytes(bytes);
expect(await poseidonCircom["poseidon(uint256[2])"]([a, b])).to.eq(
await poseidon["poseidon(uint256[2],uint256)"]([a, b], 0)
);
expect(await poseidonCircom["poseidon(uint256[2])"]([a, 0])).to.eq(
await poseidon["poseidon(uint256[2],uint256)"]([a, 0], 0)
);
expect(await poseidonCircom["poseidon(uint256[2])"]([0, b])).to.eq(
await poseidon["poseidon(uint256[2],uint256)"]([0, b], 0)
);
}
}
});
});
// test against with scroll's go implementation.
context("domain = nonzero", async () => {
let poseidonCircom: Contract;
let poseidon: Contract;
beforeEach(async () => {
const [deployer] = await ethers.getSigners();
const PoseidonWithoutDomainFactory = new ethers.ContractFactory(
PoseidonWithoutDomain.generateABI(2),
PoseidonWithoutDomain.createCode(2),
deployer
);
poseidonCircom = await PoseidonWithoutDomainFactory.deploy();
await poseidonCircom.deployed();
const PoseidonWithDomainFactory = new ethers.ContractFactory(generateABI(2), createCode(2), deployer);
poseidon = await PoseidonWithDomainFactory.deploy();
await poseidon.deployed();
});
it("should succeed on zero inputs", async () => {
expect(await poseidon["poseidon(uint256[2],uint256)"]([0, 0], 6)).to.eq(
BigNumber.from("17848312925884193353134534408113064827548730776291701343555436351962284922129")
);
expect(await poseidon["poseidon(uint256[2],uint256)"]([0, 0], 7)).to.eq(
BigNumber.from("20994231331856095272861976502721128670019193481895476667943874333621461724676")
);
});
it("should succeed on random inputs", async () => {
const lines = String(fs.readFileSync("./integration-test/testdata/poseidon_hash_with_domain.data")).split("\n");
for (const line of lines) {
const [domain, a, b, hash] = line.split(" ");
expect(await poseidon["poseidon(uint256[2],uint256)"]([a, b], domain)).to.eq(BigNumber.from(hash));
}
});
});
});

View File

@@ -12,31 +12,24 @@ describe("ScrollChain", async () => {
beforeEach(async () => {
const [deployer] = await ethers.getSigners();
const EmptyContract = await ethers.getContractFactory("EmptyContract", deployer);
const empty = await EmptyContract.deploy();
await empty.deployed();
const ProxyAdmin = await ethers.getContractFactory("ProxyAdmin", deployer);
const admin = await ProxyAdmin.deploy();
await admin.deployed();
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
const queueProxy = await TransparentUpgradeableProxy.deploy(empty.address, admin.address, "0x");
await queueProxy.deployed();
const chainProxy = await TransparentUpgradeableProxy.deploy(empty.address, admin.address, "0x");
await chainProxy.deployed();
const L1MessageQueue = await ethers.getContractFactory("L1MessageQueue", deployer);
const queueImpl = await L1MessageQueue.deploy(constants.AddressZero, chainProxy.address, deployer.address);
const queueImpl = await L1MessageQueue.deploy();
await queueImpl.deployed();
await admin.upgrade(queueProxy.address, queueImpl.address);
const queueProxy = await TransparentUpgradeableProxy.deploy(queueImpl.address, admin.address, "0x");
await queueProxy.deployed();
queue = await ethers.getContractAt("L1MessageQueue", queueProxy.address, deployer);
const ScrollChain = await ethers.getContractFactory("ScrollChain", deployer);
const chainImpl = await ScrollChain.deploy(0, queueProxy.address, deployer.address);
const chainImpl = await ScrollChain.deploy(0);
await chainImpl.deployed();
await admin.upgrade(chainProxy.address, chainImpl.address);
queue = await ethers.getContractAt("L1MessageQueue", queueProxy.address, deployer);
const chainProxy = await TransparentUpgradeableProxy.deploy(chainImpl.address, admin.address, "0x");
await chainProxy.deployed();
chain = await ethers.getContractAt("ScrollChain", chainProxy.address, deployer);
await chain.initialize(queue.address, constants.AddressZero, 100);

View File

@@ -15,7 +15,7 @@ describe("ZkEvmVerifierV1", async () => {
beforeEach(async () => {
[deployer] = await ethers.getSigners();
const bytecode = hexlify(fs.readFileSync("./src/libraries/verifier/plonk-verifier/plonk_verifier_0.9.8.bin"));
const bytecode = hexlify(fs.readFileSync("./src/libraries/verifier/plonk-verifier/plonk_verifier_0.5.1.bin"));
const tx = await deployer.sendTransaction({ data: bytecode });
const receipt = await tx.wait();
@@ -25,15 +25,47 @@ describe("ZkEvmVerifierV1", async () => {
});
it("should succeed", async () => {
const proof = hexlify(fs.readFileSync("./integration-test/testdata/plonk_verifier_0.9.8_proof.data"));
const instances = fs.readFileSync("./integration-test/testdata/plonk_verifier_0.9.8_pi.data");
const proof = hexlify(fs.readFileSync("./integration-test/testdata/plonk_verifier_0.5.1_proof.data"));
const instances = fs.readFileSync("./integration-test/testdata/plonk_verifier_0.5.1_pi.data");
const publicInputHash = new Uint8Array(32);
for (let i = 0; i < 32; i++) {
publicInputHash[i] = instances[i * 32 + 31];
}
expect(hexlify(publicInputHash)).to.eq("0x31b430667bc9e8a8b7eda5e5c76f2250c64023f5f8e0689ac9f4e53f5362da66");
// chunk1: https://github.com/scroll-tech/test-traces/blob/674ad743beab04b57da369fa5958fb6824155bfe/erc20/1_transfer.json
// 0000000000000005 blockNumber
// 0000000064c3ca7c timestamp
// 0000000000000000000000000000000000000000000000000000000000000000 baseFee
// 00000000007a1200 gasLimit
// 0001 numTransactions
// 8da3fedb103b6da8ccc2514094336d1a76df166238f4d8e8558fbe54cce2516a tx hash 0
// chunk2: https://github.com/scroll-tech/test-traces/blob/674ad743beab04b57da369fa5958fb6824155bfe/erc20/10_transfer.json
// 0000000000000006 blockNumber
// 0000000064c3ca7f timestamp
// 0000000000000000000000000000000000000000000000000000000000000000 baseFee
// 00000000007a1200 gasLimit
// 000a numTransactions
// 419164c1a7213e4e52f8578463c47a01549f69a7ff220d93221ce02909f5b919 tx hash 0
// 6c1b03d1a9b5156e189ad2e7ba73ba71d9a83b24f9830f38dd7a597fe1e67167 tx hash 1
// 94f981938d02b2c1d91ff370b3ed759dadc617c7347cd4b8552b275edbffd767 tx hash 2
// bfe98147fc808a916bdff90e838e77609fd59634787443f6fc58f9a371790d09 tx hash 3
// beb9dd0259e7c4f0a8d5ac3ba6aa3940c3e53947395f64e8ee88c7067c6d210e tx hash 4
// 208c6c767356552ad8085fa77a99d9154e0c8cf8777e329cb76bcbc969d21fca tx hash 5
// 37c8969833fbc6cbb88a63ccef324d7b42d0607ac0094f14e1f6d4e50f84d87f tx hash 6
// 088c5ad45a990694ac783207fe6bda9bf97da40e1f3eb468c73941d51b99932c tx hash 7
// c3d8ddbdfc67877a253255b9357aabfd062ce80d39eba67547f964c288660065 tx hash 8
// ff26ca52c02b97b1a6677263d5d6dec0321fb7b49be44ae0a66ba5482b1180b4 tx hash 9
// => chunk 0 data hash: 9390886a7d22aa43aae87e62a350c904fabc5db4487d9b25bdca446ba7ed15a1
// => chunk 1 data hash: a8846bf9bc53f30a391ae452b5fd456cb86a99ab7bd2e1e47898ffbe3509e8eb
// => batch data hash: ee64d77c2f2e0b2c4ac952a0f54fdba4a217c42eb26a07b28de9fbc7b009acae
// 000000000000cf55 layer2ChainId
// 02040e949809e8d2e56d35b4dfb876e08ee7b4608d22f23f52052425857c31ba prevStateRoot
// 1532cdb7732da0a4ca3044914c6959b7e2b7ba4e913a9f5f0b55051e467412d9 postStateRoot
// 0000000000000000000000000000000000000000000000000000000000000000 withdrawRoot
// ee64d77c2f2e0b2c4ac952a0f54fdba4a217c42eb26a07b28de9fbc7b009acae batchDataHash
// public input hash: 9ea439164727042e029464a40901e52800095c1ade301b63b4b7453880f5723e
expect(hexlify(publicInputHash)).to.eq("0x9ea439164727042e029464a40901e52800095c1ade301b63b4b7453880f5723e");
// verify ok
await zkEvmVerifier.verify(proof, publicInputHash);

View File

@@ -5,9 +5,7 @@ import { concat } from "ethers/lib/utils";
import { ethers } from "hardhat";
import { MockZkTrieVerifier } from "../typechain";
import { generateABI, createCode } from "../scripts/poseidon";
const chars = "0123456789abcdef";
import poseidonUnit from "circomlib/src/poseidon_gencontract";
interface ITestConfig {
block: number;
@@ -22,245 +20,170 @@ interface ITestConfig {
const testcases: Array<ITestConfig> = [
{
// curl -H "content-type: application/json" -X POST --data '{"id":0,"jsonrpc":"2.0","method":"eth_getProof","params":["0x5300000000000000000000000000000000000004", ["0x8391082587ea494a8beba02cc40273f27e5477a967cd400736ac46950da0b378"], "0x1111ad"]}' https://rpc.scroll.io
block: 1118637,
desc: "WETH.balance[0xa7994f02237aed2c116a702a8f5322a1fb325b31]",
block: 95216,
desc: "contract with storage",
account: "0x5300000000000000000000000000000000000004",
storage: "0x8391082587ea494a8beba02cc40273f27e5477a967cd400736ac46950da0b378",
expectedRoot: "0x1334a21a74914182745c1f5142e70b487262096784ae7669186657462c01b103",
expectedValue: "0x00000000000000000000000000000000000000000000000000006239b5a2c000",
storage: "0x9505174b0709a2a1997fe9797cb89648a93f17ce0096cbc1a6ed52b73170b96a",
expectedRoot: "0x2dc794537b959b575dc216cd11389d802f9389fce7183278561a824aa8e950e2",
expectedValue: "0x00000000000000000000000000000000000000000000000111346048bf18a14a",
accountProof: [
"0x0907d980105678a2007eb5683d850f36a9caafe6e7fd3279987d7a94a13a360d3a1478f9a4c1f8c755227ee3544929bb0d7cfa2d999a48493d048ff0250bb002ab",
"0x092b59a024f142555555c767842c4fcc3996686c57699791fcb10013f69ffd9b2507360087cb303767fd43f2650960621246a8d205d086e03d9c1626e4aaa5b143",
"0x091f876342916ac1d5a14ef40cfc5644452170b16d1b045877f303cd52322ba1e00ba09f36443c2a63fbd7ff8feeb2c84e99fde6db08fd8e4c67ad061c482ff276",
"0x09277b3069a4b944a45df222366aae727ec64efaf0a8ecb000645d0eea3a3fa93609b925158cc04f610f8c616369094683ca7a86239f49e97852aa286d148a3913",
"0x092fb789200a7324067934da8be91c48f86c4e6f35fed6d1ce8ae4d7051f480bc0074019222c788b139b6919dfbc9d0b51f274e0ed3ea03553b8db30392ac05ce4",
"0x092f79da8f9f2c3a3a3813580ff18d4619b95f54026b2f16ccbcca684d5e25e1f52912fa319d9a7ba537a52cc6571844b4d1aa99b8a78cea6f686a6279ade5dcae",
"0x09249d249bcf92a369bd7715ec63a4b29d706a5dbb304efd678a2e5d7982e7fa9b202e3225c1031d83ab62d78516a4cbdbf2b22842c57182e7cb0dbb4303ac38c5",
"0x0904837ebb85ceccab225d4d826fe57edca4b00862199b91082f65dfffa7669b90039c710273b02e60c2e74eb8b243721e852e0e56fa51668b6362fd920f817cb7",
"0x090a36f6aabc3768a05dd8f93667a0eb2e5b63d94b5ce27132fb38d13c56d49cb4249c2013daee90184ae285226271f150f6a8f74f2c85dbd0721c5f583e620b10",
"0x091b82f139a06af573e871fdd5f5ac18f17c568ffe1c9e271505b371ad7f0603e716b187804a49d2456a0baa7c2317c14d9aa7e58ad64df38bc6c1c7b86b072333",
"0x0929668e59dfc2e2aef10194f5d287d8396e1a897d68f106bdb12b9541c0bab71d2bf910dea11e3209b3feff88d630af46006e402e935bc84c559694d88c117733",
"0x0914231c92f09f56628c10603dc2d2120d9d11b27fa23753a14171127c3a1ee3dd0d6b9cbd11d031fe6e1b650023edc58aa580fa4f4aa1b30bf82e0e4c7a308bb9",
"0x0914c1dd24c520d96aac93b7ef3062526067f1b15a080c482abf449d3c2cde781b195eb63b5e328572090319310914d81b2ca8350b6e15dc9d13e878f8c28c9d52",
"0x0927cb93e3d9c144a5a3653c5cf2ed5940d64f461dd588cd192516ae7d855e9408166e85986d4c9836cd6cd822174ba9db9c7a043d73e86b5b2cfc0a2e082894c3",
"0x090858bf8a0119626fe9339bd92116a070ba1a66423b0f7d3f4666b6851fdea01400f7f51eb22df168c41162d7f18f9d97155d87da523b05a1dde54e7a30a98c31",
"0x0902776c1f5f93a95baea2e209ddb4a5e49dd1112a7f7d755a45addffe4a233dad0d8cc62b957d9b254fdc8199c720fcf8d5c65d14899911e991b4530710aca75e",
"0x091d7fde5c78c88bbf6082a20a185cde96a203ea0d29c829c1ab9322fc3ca0ae3100ef7cba868cac216d365a0232ad6227ab1ef3290166bc6c19b719b79dbc17fc",
"0x091690160269c53c6b74337a00d02cb40a88ea5eba06e1942088b619baee83279e12d96d62dda9c4b5897d58fea40b5825d87a5526dec37361ec7c93a3256ea76d",
"0x091bccb091cde3f8ca7cfda1df379c9bfa412908c41037ae4ec0a20ce984e2c9a51d02c109d2e6e25dc60f10b1bc3b3f97ca1ce1aa025ce4f3146de3979403b99e",
"0x0927083540af95e57acba69671a4a596f721432549b8760941f4251e0dd7a013a917cee0f60d333cf88e40ae8710fb1fd6e3920346a376b3ba6686a4b2020a043e",
"0x082170b57b8f05f6990eec62e74cdb303741f6c464a85d68582c19c51e53f490000a5029a62ddc14c9c07c549db300bd308b6367454966c94b8526f4ceed5693b2",
"0x0827a0b16ef333dcfe00610d19dc468b9e856f544c9b5e9b046357e0a38aedaeb90000000000000000000000000000000000000000000000000000000000000000",
"0x06126f891e8753e67c5cbfa2a67e9d71942eab3a88cde86e97a4af94ea0dde497821fb69ccdb00e6eaeaf7fc1e73630f39f846970b72ac801e396da0033fb0c247",
"0x0420e9fb498ff9c35246d527da24aa1710d2cc9b055ecf9a95a8a2a11d3d836cdf050800000000000000000000000000000000000000000000000016ef00000000000000000000000000000000000000000000000000000000000000600058d1a5ce14104d0dedcaecaab39b6e22c2608e40af67a71908e6e97bbf4a43c59c4537140c25a9e8c4073351c26b9831c1e5af153b9be4713a4af9edfdf32b58077b735e120f14136a7980da529d9e8d3a71433fc9dc5aa8c01e3a4eb60cb3a4f9cf9ca5c8e0be205300000000000000000000000000000000000004000000000000000000000000",
"0x001988ce414103e97cb80613adde47d5fe0611b30087d1cfcdb84284c42907467e24ac744be2edcb86cdfc42a9bbb7b2a270649161c3ce3a41d3ad5a26927d2c79",
"0x0028db7c407cab6652f1f194401bd87bda33c9a1723b4f93515bd5929cad02668123fa5a3e69136c8e03a62c805f89c9d3578a6f5fac4bb281fc4d7df12fbcc5db",
"0x000376d1bfe3d5c6afffb5707a34003209c57fbf15430daf0f8022b4df2bb947460ab4fda7be343efd34af2420e8e9d4268f436cb7700a005086df4eba083407c8",
"0x0025df09dd66dd9d8b5b1abb82cee9985a2addd12e7f5671c910e27644ccaf498c2a2d7021169172e380831f43a00f0a3bef8576c7c74ac98fd7e7b1ec443ac92e",
"0x00218d51f8e754bf89062007dd765b50b7385bbb4a57db258ac8dcf9ad69b6f4552ddc5a17cec74d8e8f06e16c0a6112023c34d6c001060bc783ab4d06a4a9801a",
"0x001166c2eedfbbb4568ec27c57b2729437c0c8c38161fad643f03f76fbd807e712286d86bfdceb6729daedb6f219dd0f6080386d9a2a8f9c1dcb89792c8754e125",
"0x0028fd666ed406e277f6496bcac13af8b303b58c74be937399095399f4dd141c6f2876f81684c2546ff90b221ba2fe1290e671770af08fd545868e3351401b1503",
"0x000b9245c7ccc1eab305d40cced5e8aac6c8ddb877451075185bb7a6c1a4973a5d2852ce761c8e417a5f604a6ef4196ec101014aa1d1e4c684d1b5b8cbec5c37b1",
"0x0019755e50ef22e13ae17cbc33d9e708ee9efc011941b3a920bc65da9825b04eb029a43488e5584b68d1a98a215f03f31e063734a3305600f9feed11607271d0d3",
"0x002e10cc0afbf5b336e6a6eeae0c863df7a7c2ba61c599618fb973aeff397918e523b18c08a19fa6bc964ae41c56af610ab43d948db94ad2543e9807a5a0f1d2f0",
"0x00247f3f0cebebf749e27c8ffd81e9919cab114bd3d75029e3260e99b6c7fe551d06a69531144f521b68d1a2c7450f5a20146efdaf7b47271782bb8746a023cf84",
"0x0029ad88f0ee7198edcae37ab88efb2a27ea8956d6b988264b227843c175743c4329916ead363e6adfc27f400977d2d9efb1f896616a18d71e2702ec8201b82c57",
"0x002a1de55ee84561850354085516a1101705f8240b8f1e1f9aea3a464650d637a52fad2de438ac5851b0e28508af90bd385dbcad5df8ea23ca78792f094ff7ca0d",
"0x001ba118afa1977f1fda1411cd1c7f145ab97a35b1e724060d5cfc3d58b27141ee2b0a8dbf3d494c6a1bf6456c4de00de8e2f0d9be0716a3ca78d4df28948f975b",
"0x0025bdbf508c1e3808415136bfdd6dfb548c33348d813882b0d405913405d575010c60f95c658dc8113f7c97935a35d78c23dba131c25866fc8d93920e318d2450",
"0x0007bc3ec4d80df884c4d87f4541ffa522046a4c52e6cccb9ff7376ff56149e5d21b87a56676f679f4b8b4478c8a3aa80a09127258cccd4aa373a5c7c2344d2d03",
"0x010aef26efde9e4bca477d460482bce3de3577f6e9a280dea6d3f9985b4151deab0508000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000013328350573dd32b38291529042b30b83bf20bfc7e18ab6a9755e2ea692d5a7644f896b0d629cf9740d72ccbc90dd6141deb3fab132f1ebc17ab963c612c7123d5a524d0158cc8291b081281272d79459760d885ea652024615d55b114b5872571b21aee99977b8681205300000000000000000000000000000000000004000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
storageProof: [
"0x09240ea2601c34d792a0a5a8a84d8e501cfdfdf2c10ef13ea560acac58661882dd1b3644d1d4f3e32fc78498a7ebeffac8c6a494ac6f36923ef1be476444c0d564",
"0x0912af3ac8f8ea443e6d89d071fccaa2b3c8462220c1c2921234f613b41594f08f2a170e61f5f436b536c155b438044cf0d0f24b94b4c034ad22b3eae824998243",
"0x0916011d547d7a54929c3515078f4f672c6b390ccdd4119f0776376910bc5a38da1a059ed9c504fadcc9f77e8a402175743bee1f5be27b7002b0f6c5b51070452c",
"0x09017285edc268d979eb410b46627e541afda16cdb3577ce04c15dc14cc6609c60143f0c01e71e99b2efbe3d8e62a2c812889aa9fd88dd4b0ed8eadcf1ec9b096a",
"0x0922901e65200b007ad8e1b972e90403b336e459e0cf9b9d68732da345b1b0f6872c9e3f3edacbd857b26d0a66a80aa56c6ebaa9849e9ea5a2b17fd59cabe138e4",
"0x091b77a00164a72880eec6c18fc043fa99f922e20bbee156e1ebfd3a358bee6bbb24d97cfaa234befe197a567476cade91b7d97a1017b8d5286dae4dddadffe1cd",
"0x09216f1c4d67a9a428885bb8d978ad369d2d69d4dcc1692c3a0c3ea05da7d6f0ac2d6dda722e76eb513c67718e7be0478851758be5547322473a53b5b2b67faf95",
"0x091f56c6f18ceb7077125df1ed17a42a85956090594125c1b182161de20f8af6aa2e36977412f9ea2ad2c0951153969eca8408317558ff1b6b4ad731726235f606",
"0x092ca197dda6c519d80296f4fcda2933df9608ec684ad000133259024041d070812d29b058a998cf7ffc647b2739041725d77889f58953799c6aba6d9e5b981fc8",
"0x091c25a87d321a09ad2a149d1a7eaa77727c7feffb4c39caf44f8edd4377f7bd0c16d1091494d3c90d301c1cb4596692798e78e4cc3d53c3a08e2641de43f9da18",
"0x092166058c98245eb85b08da1c569df11f86b00cc44212a9a8ee0d60556d05a8030942c68b535651e11af38264ecc89e5f79b66c3d9ce87233ad65d4894a3d1c3d",
"0x0908c3b13b7400630170baec7448c7ec99fa9100cad373e189e42aca121e2c8f450f9e40d92d98bb0b1286a18581591fddfa8637fc941c1630237293d69e5cb98f",
"0x091362d251bbd8b255d63cd91bcfc257b8fb3ea608ce652784e3db11b22ca86c0122a0068fa1f1d54f313bed9fd9209212af3f366e4ff28092bf42c4abebffe10a",
"0x081d67961bb431a9da78eb976fabd641e20fbf4b7e32eb3faac7dfb5abb50f1faf1438d77000c1cf96c9d61347e1351eb0200260ebe523e69f6e9f334ec86e6b58",
"0x0819324d2488778bdef23319a6832001ee85f578cc920670c81f3645f898a46ec62e00385c4416ca4ccbab237b13396e5e25e5da12101021c6a6f9ecfe7c7fed19",
"0x041421380c36ea8ef65a9bdb0202b06d1e03f52857cdfea3795463653eaa3dd7d80101000000000000000000000000000000000000000000000000000000006239b5a2c000208391082587ea494a8beba02cc40273f27e5477a967cd400736ac46950da0b378",
"0x000a52b818e0a009930d62c17f2b1244179b7c14f8e1ae317fb3bfd3a3ba6060031b2a4aa2df31e79f926474987eea69aab84f4581cfd61b0338438110f6be145b",
"0x001684ff1ef6ea054c5a6a5cae45f9280dacfc10c6cde39d1f64a00ad3c77549fe1c14ff8a628c0244ba48d63610e5d0b514c1b7b60301b6f27f77a435caf8bd60",
"0x001a2ba0ad7d6447d3c2476aa2e6bd04ab552ac1840450ce11f338f58a80fcdf420df4b9fc89108a0a44d844d981abe44d5ab20a5a101d07e94d131f07bf83ba62",
"0x0007158ec8942174c68bde0ab3666eb29b3c5784693bbfcd21126789d98bbdd05409f0313df8ddc438abe4798854f30c9daa2274950ce833a2de21e09b8b2c11b2",
"0x000ab27b84c73e447618f030ad9d621b0d61cc783e7ae5671ffcd3ff479b5093fe173d6126fa71986aa679b5384a2dc25f3a15f806a546e933f9fda6ac0a3460d9",
"0x0024ca9a7c6b7bf77c7a7acdae9d8e551b08ec6adf30abb7d1c45a6bbd5058ea921802170d5cc7de7d294cf6c67b0ac0208fe76497803554fb5bba9f78721568eb",
"0x0018a60c68b26022ced26cce2be1af1d6b33f4c16596d1ba18d5f47fea98ae490b12e66678391e289de1cf981c122e765265b94f0669614d94847480a77c2d3b74",
"0x001a776d5e5902c9a073c86a71ee80d167d6e2eb92150df2afb3d87f18b2cce6f02af158ba1cfbc643b36c1e001b59473cc88663b44c8d70739a27b804ec387146",
"0x0012cd2c1070b0d2eb215eb760fba9b843bd5c732102ce9773701076b0e37a437e136901c4ddc1cdbef42f46af629296ca5965b41a53cce65237612cea27477076",
"0x002bf94aa1fcb474365039e949bbbeabe0162ffc490b1b63ffe0f84bf182a8bf16169fe345e742d176a80f6e733177736d93e40fc9fdd4866efa6cc45ad94e9577",
"0x001a2e6e1b585fa0564fc606c3d62c26d9a113d75430966ff3f500e450c762edeb24fb1e5456ed4313d9418a1b073ae8b3f852f0f8435752bbbe65d21726ddb873",
"0x002529704fb28f7d3f9d2f3e9d38b000b6bfc2a21cb0a1955797016536066307d70ba7397326ecf50b98153f9e3baa96608efdf7d772b1ff28649bef677860dba9",
"0x0022f4f22a1d85ac83a56e7031559cf874c78a2f2ee6b6b93625f588313964a6d0052f6c873c6417d409c2a5317b31449b36fb4faede558d03b448b06b4a198daa",
"0x0017167b295954b29f62d7347dab3158aedc8586d5aa233d3f69c14bc7fe31eb840000000000000000000000000000000000000000000000000000000000000000",
"0x002d7bed0c0f0318a6fc60f903f4a42841cc4fa431ddf1a97fc34f35d6a267434b2a1a818d75328089a9578143e31b1f535517e09ff50a728b100483e712c8bc9a",
"0x0126ae15b478408eb45ea8b6f61aad1345f2b6257efd1acc4a6024b26f664c98240101000000000000000000000000000000000000000000000000000111346048bf18a14a209505174b0709a2a1997fe9797cb89648a93f17ce0096cbc1a6ed52b73170b96a",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
},
{
// curl -H "content-type: application/json" -X POST --data '{"id":0,"jsonrpc":"2.0","method":"eth_getProof","params":["0x5300000000000000000000000000000000000004", ["0x0000000000000000000000000000000000000000000000000000000000000002"], "0x1111ad"]}' https://rpc.scroll.io
block: 1118637,
desc: "WETH.totalSupply",
account: "0x5300000000000000000000000000000000000004",
block: 95216,
desc: "contract with empty storage node",
account: "0xb75d7e84517e1504c151b270255b087fd746d34c",
storage: "0x0000000000000000000000000000000000000000000000000000000000000002",
expectedRoot: "0x1334a21a74914182745c1f5142e70b487262096784ae7669186657462c01b103",
expectedValue: "0x0000000000000000000000000000000000000000000000600058d1a5ce14104d",
accountProof: [
"0x0907d980105678a2007eb5683d850f36a9caafe6e7fd3279987d7a94a13a360d3a1478f9a4c1f8c755227ee3544929bb0d7cfa2d999a48493d048ff0250bb002ab",
"0x092b59a024f142555555c767842c4fcc3996686c57699791fcb10013f69ffd9b2507360087cb303767fd43f2650960621246a8d205d086e03d9c1626e4aaa5b143",
"0x091f876342916ac1d5a14ef40cfc5644452170b16d1b045877f303cd52322ba1e00ba09f36443c2a63fbd7ff8feeb2c84e99fde6db08fd8e4c67ad061c482ff276",
"0x09277b3069a4b944a45df222366aae727ec64efaf0a8ecb000645d0eea3a3fa93609b925158cc04f610f8c616369094683ca7a86239f49e97852aa286d148a3913",
"0x092fb789200a7324067934da8be91c48f86c4e6f35fed6d1ce8ae4d7051f480bc0074019222c788b139b6919dfbc9d0b51f274e0ed3ea03553b8db30392ac05ce4",
"0x092f79da8f9f2c3a3a3813580ff18d4619b95f54026b2f16ccbcca684d5e25e1f52912fa319d9a7ba537a52cc6571844b4d1aa99b8a78cea6f686a6279ade5dcae",
"0x09249d249bcf92a369bd7715ec63a4b29d706a5dbb304efd678a2e5d7982e7fa9b202e3225c1031d83ab62d78516a4cbdbf2b22842c57182e7cb0dbb4303ac38c5",
"0x0904837ebb85ceccab225d4d826fe57edca4b00862199b91082f65dfffa7669b90039c710273b02e60c2e74eb8b243721e852e0e56fa51668b6362fd920f817cb7",
"0x090a36f6aabc3768a05dd8f93667a0eb2e5b63d94b5ce27132fb38d13c56d49cb4249c2013daee90184ae285226271f150f6a8f74f2c85dbd0721c5f583e620b10",
"0x091b82f139a06af573e871fdd5f5ac18f17c568ffe1c9e271505b371ad7f0603e716b187804a49d2456a0baa7c2317c14d9aa7e58ad64df38bc6c1c7b86b072333",
"0x0929668e59dfc2e2aef10194f5d287d8396e1a897d68f106bdb12b9541c0bab71d2bf910dea11e3209b3feff88d630af46006e402e935bc84c559694d88c117733",
"0x0914231c92f09f56628c10603dc2d2120d9d11b27fa23753a14171127c3a1ee3dd0d6b9cbd11d031fe6e1b650023edc58aa580fa4f4aa1b30bf82e0e4c7a308bb9",
"0x0914c1dd24c520d96aac93b7ef3062526067f1b15a080c482abf449d3c2cde781b195eb63b5e328572090319310914d81b2ca8350b6e15dc9d13e878f8c28c9d52",
"0x0927cb93e3d9c144a5a3653c5cf2ed5940d64f461dd588cd192516ae7d855e9408166e85986d4c9836cd6cd822174ba9db9c7a043d73e86b5b2cfc0a2e082894c3",
"0x090858bf8a0119626fe9339bd92116a070ba1a66423b0f7d3f4666b6851fdea01400f7f51eb22df168c41162d7f18f9d97155d87da523b05a1dde54e7a30a98c31",
"0x0902776c1f5f93a95baea2e209ddb4a5e49dd1112a7f7d755a45addffe4a233dad0d8cc62b957d9b254fdc8199c720fcf8d5c65d14899911e991b4530710aca75e",
"0x091d7fde5c78c88bbf6082a20a185cde96a203ea0d29c829c1ab9322fc3ca0ae3100ef7cba868cac216d365a0232ad6227ab1ef3290166bc6c19b719b79dbc17fc",
"0x091690160269c53c6b74337a00d02cb40a88ea5eba06e1942088b619baee83279e12d96d62dda9c4b5897d58fea40b5825d87a5526dec37361ec7c93a3256ea76d",
"0x091bccb091cde3f8ca7cfda1df379c9bfa412908c41037ae4ec0a20ce984e2c9a51d02c109d2e6e25dc60f10b1bc3b3f97ca1ce1aa025ce4f3146de3979403b99e",
"0x0927083540af95e57acba69671a4a596f721432549b8760941f4251e0dd7a013a917cee0f60d333cf88e40ae8710fb1fd6e3920346a376b3ba6686a4b2020a043e",
"0x082170b57b8f05f6990eec62e74cdb303741f6c464a85d68582c19c51e53f490000a5029a62ddc14c9c07c549db300bd308b6367454966c94b8526f4ceed5693b2",
"0x0827a0b16ef333dcfe00610d19dc468b9e856f544c9b5e9b046357e0a38aedaeb90000000000000000000000000000000000000000000000000000000000000000",
"0x06126f891e8753e67c5cbfa2a67e9d71942eab3a88cde86e97a4af94ea0dde497821fb69ccdb00e6eaeaf7fc1e73630f39f846970b72ac801e396da0033fb0c247",
"0x0420e9fb498ff9c35246d527da24aa1710d2cc9b055ecf9a95a8a2a11d3d836cdf050800000000000000000000000000000000000000000000000016ef00000000000000000000000000000000000000000000000000000000000000600058d1a5ce14104d0dedcaecaab39b6e22c2608e40af67a71908e6e97bbf4a43c59c4537140c25a9e8c4073351c26b9831c1e5af153b9be4713a4af9edfdf32b58077b735e120f14136a7980da529d9e8d3a71433fc9dc5aa8c01e3a4eb60cb3a4f9cf9ca5c8e0be205300000000000000000000000000000000000004000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
storageProof: [
"0x09240ea2601c34d792a0a5a8a84d8e501cfdfdf2c10ef13ea560acac58661882dd1b3644d1d4f3e32fc78498a7ebeffac8c6a494ac6f36923ef1be476444c0d564",
"0x0912af3ac8f8ea443e6d89d071fccaa2b3c8462220c1c2921234f613b41594f08f2a170e61f5f436b536c155b438044cf0d0f24b94b4c034ad22b3eae824998243",
"0x0916011d547d7a54929c3515078f4f672c6b390ccdd4119f0776376910bc5a38da1a059ed9c504fadcc9f77e8a402175743bee1f5be27b7002b0f6c5b51070452c",
"0x092293af71b7b9315c32d08f06e291b85e3b3dbba786dd31952369f666281aa21125ab35feae70aaca9349f6af48f7dcf2dee0324e4eae03e929963e7728b633a3",
"0x090607033a4b976c1e4683298d66b88a95ed45033ff43dea0670d84a8c42d35bf12562869385c0e70f561f18be4b78e7276b837f140a45ab12ffef1ba4ad5faecb",
"0x090abc5f713c2f58583114bb5081d00cbd01789d8efbd95e471b151c71c475142f0f52ad30f8a63288eb9dd12aca2a670de08c03f8384f55d730c943e1c472625b",
"0x0905156e8704d6195f6ae562aed2072f4e32422c6dfd4840ca354b9c4d2de5ce760fca52b1e0689ad374bae9fbea262a929f919695149a083fe6bacb806dc02fca",
"0x0917078d4c193a3fdbfe8ce3a235a0e1df89e626b5e91636097e299883fc2447892ad46eefbb27909544fe02c05e29760315749f6ce21c17c52158f5f5616c2dad",
"0x0917d02e5da8bdb969149c9327b247a6aaa479bcda4a03665da5103c10e616d2f40ccabdacdd25b34235d26e50e7af5d8d312a2cafdcadd41cc589a71a322f254c",
"0x090c62f5c476c1def8ed8a8c25ae54581690b39dfab4b0f3f78b93df96f626714328ea922a76a058087563bb5370664e9a1cebe3062f2d904bf5e3a018219d6563",
"0x091e481971f770e587b1f62f1da9ac4687abc5b2a23097fc38332e15ab957ca0ab0ec0a95c15313887e0d2f166c100deaf17f2ce50767680e6e5b2e3068801c0cd",
"0x0911799e186f1bd299dfa08c07404b9d28e2b179fb6ad523f1846872537b6db85f198b573ac1397048258de38b391fcc5e0c86a0f81f4ca607785fb37041ab8b4d",
"0x092053a028cf3bfcdabcb58985efc39f078cb0bcae4439528a0b6fe4b24bbdbd2c019a04a54e9e96077f3c2c39c1602a83387018b6357ea4c28e96764865d1c8f3",
"0x07303fad3e4628ccae4de1adb41996c9f38b22445b6525ff163b4c68cbde275b1a06111cae9b4d17b730d94f589e20c6ae2cb59bf0b40ad05bf58703ee6d46eac4",
"0x0606bc3fca1f1b3c877aa01a765c18db8b0d7f0bc50bd99f21223055bf1595c84d04fdc0fd416d8402fde743d908d032a20af6f2e65cdc6cc289f72c04f1c2476f",
"0x04020953ad52de135367a1ba2629636216ed5174cce5629d11b5d97fe733f07dcc010100000000000000000000000000000000000000000000000000600058d1a5ce14104d200000000000000000000000000000000000000000000000000000000000000002",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
},
{
// curl -H "content-type: application/json" -X POST --data '{"id":0,"jsonrpc":"2.0","method":"eth_getProof","params":["0x5300000000000000000000000000000000000004", ["0x0000000000000000000000000000000000000000000000000000000000002222"], "0x1111ad"]}' https://rpc.scroll.io
block: 1118637,
desc: "random empty storage in WETH",
account: "0x5300000000000000000000000000000000000004",
storage: "0x0000000000000000000000000000000000000000000000000000000000002222",
expectedRoot: "0x1334a21a74914182745c1f5142e70b487262096784ae7669186657462c01b103",
expectedRoot: "0x2dc794537b959b575dc216cd11389d802f9389fce7183278561a824aa8e950e2",
expectedValue: "0x0000000000000000000000000000000000000000000000000000000000000000",
accountProof: [
"0x0907d980105678a2007eb5683d850f36a9caafe6e7fd3279987d7a94a13a360d3a1478f9a4c1f8c755227ee3544929bb0d7cfa2d999a48493d048ff0250bb002ab",
"0x092b59a024f142555555c767842c4fcc3996686c57699791fcb10013f69ffd9b2507360087cb303767fd43f2650960621246a8d205d086e03d9c1626e4aaa5b143",
"0x091f876342916ac1d5a14ef40cfc5644452170b16d1b045877f303cd52322ba1e00ba09f36443c2a63fbd7ff8feeb2c84e99fde6db08fd8e4c67ad061c482ff276",
"0x09277b3069a4b944a45df222366aae727ec64efaf0a8ecb000645d0eea3a3fa93609b925158cc04f610f8c616369094683ca7a86239f49e97852aa286d148a3913",
"0x092fb789200a7324067934da8be91c48f86c4e6f35fed6d1ce8ae4d7051f480bc0074019222c788b139b6919dfbc9d0b51f274e0ed3ea03553b8db30392ac05ce4",
"0x092f79da8f9f2c3a3a3813580ff18d4619b95f54026b2f16ccbcca684d5e25e1f52912fa319d9a7ba537a52cc6571844b4d1aa99b8a78cea6f686a6279ade5dcae",
"0x09249d249bcf92a369bd7715ec63a4b29d706a5dbb304efd678a2e5d7982e7fa9b202e3225c1031d83ab62d78516a4cbdbf2b22842c57182e7cb0dbb4303ac38c5",
"0x0904837ebb85ceccab225d4d826fe57edca4b00862199b91082f65dfffa7669b90039c710273b02e60c2e74eb8b243721e852e0e56fa51668b6362fd920f817cb7",
"0x090a36f6aabc3768a05dd8f93667a0eb2e5b63d94b5ce27132fb38d13c56d49cb4249c2013daee90184ae285226271f150f6a8f74f2c85dbd0721c5f583e620b10",
"0x091b82f139a06af573e871fdd5f5ac18f17c568ffe1c9e271505b371ad7f0603e716b187804a49d2456a0baa7c2317c14d9aa7e58ad64df38bc6c1c7b86b072333",
"0x0929668e59dfc2e2aef10194f5d287d8396e1a897d68f106bdb12b9541c0bab71d2bf910dea11e3209b3feff88d630af46006e402e935bc84c559694d88c117733",
"0x0914231c92f09f56628c10603dc2d2120d9d11b27fa23753a14171127c3a1ee3dd0d6b9cbd11d031fe6e1b650023edc58aa580fa4f4aa1b30bf82e0e4c7a308bb9",
"0x0914c1dd24c520d96aac93b7ef3062526067f1b15a080c482abf449d3c2cde781b195eb63b5e328572090319310914d81b2ca8350b6e15dc9d13e878f8c28c9d52",
"0x0927cb93e3d9c144a5a3653c5cf2ed5940d64f461dd588cd192516ae7d855e9408166e85986d4c9836cd6cd822174ba9db9c7a043d73e86b5b2cfc0a2e082894c3",
"0x090858bf8a0119626fe9339bd92116a070ba1a66423b0f7d3f4666b6851fdea01400f7f51eb22df168c41162d7f18f9d97155d87da523b05a1dde54e7a30a98c31",
"0x0902776c1f5f93a95baea2e209ddb4a5e49dd1112a7f7d755a45addffe4a233dad0d8cc62b957d9b254fdc8199c720fcf8d5c65d14899911e991b4530710aca75e",
"0x091d7fde5c78c88bbf6082a20a185cde96a203ea0d29c829c1ab9322fc3ca0ae3100ef7cba868cac216d365a0232ad6227ab1ef3290166bc6c19b719b79dbc17fc",
"0x091690160269c53c6b74337a00d02cb40a88ea5eba06e1942088b619baee83279e12d96d62dda9c4b5897d58fea40b5825d87a5526dec37361ec7c93a3256ea76d",
"0x091bccb091cde3f8ca7cfda1df379c9bfa412908c41037ae4ec0a20ce984e2c9a51d02c109d2e6e25dc60f10b1bc3b3f97ca1ce1aa025ce4f3146de3979403b99e",
"0x0927083540af95e57acba69671a4a596f721432549b8760941f4251e0dd7a013a917cee0f60d333cf88e40ae8710fb1fd6e3920346a376b3ba6686a4b2020a043e",
"0x082170b57b8f05f6990eec62e74cdb303741f6c464a85d68582c19c51e53f490000a5029a62ddc14c9c07c549db300bd308b6367454966c94b8526f4ceed5693b2",
"0x0827a0b16ef333dcfe00610d19dc468b9e856f544c9b5e9b046357e0a38aedaeb90000000000000000000000000000000000000000000000000000000000000000",
"0x06126f891e8753e67c5cbfa2a67e9d71942eab3a88cde86e97a4af94ea0dde497821fb69ccdb00e6eaeaf7fc1e73630f39f846970b72ac801e396da0033fb0c247",
"0x0420e9fb498ff9c35246d527da24aa1710d2cc9b055ecf9a95a8a2a11d3d836cdf050800000000000000000000000000000000000000000000000016ef00000000000000000000000000000000000000000000000000000000000000600058d1a5ce14104d0dedcaecaab39b6e22c2608e40af67a71908e6e97bbf4a43c59c4537140c25a9e8c4073351c26b9831c1e5af153b9be4713a4af9edfdf32b58077b735e120f14136a7980da529d9e8d3a71433fc9dc5aa8c01e3a4eb60cb3a4f9cf9ca5c8e0be205300000000000000000000000000000000000004000000000000000000000000",
"0x001988ce414103e97cb80613adde47d5fe0611b30087d1cfcdb84284c42907467e24ac744be2edcb86cdfc42a9bbb7b2a270649161c3ce3a41d3ad5a26927d2c79",
"0x0028db7c407cab6652f1f194401bd87bda33c9a1723b4f93515bd5929cad02668123fa5a3e69136c8e03a62c805f89c9d3578a6f5fac4bb281fc4d7df12fbcc5db",
"0x0006801926f00b574e3a88162d192482fecba9918b77e133dd77587d9efaf5c7861712d244ac8ad4bc0bffe0dbe8ab261865c9a69b4b7769e9c188ec048460ce78",
"0x002f3161746c2c70c7cefb74c07bc16b28bd9011343f5c6f8756471cd0b184601a25d05d5447a572452964b3c20f40ef841bf313c958e82a6923584e20496df67f",
"0x000efef3e3e174da6f3f451b5a2652d2489fff449a217c10841e68e4a15995d6521c4b1552c592020fbc7219c5d67ff00bd630db8102ce5c6ca12bea29b80ba5e5",
"0x0019b4749b17792c0ad9f7b460a0faf35400da9423be38ac5c40e81c805acc72592c0b933e1c25d05db98d98fc4f04b27610b2ee88281126099aed42f27cd96b00",
"0x002b8d563c5041f28afa38da01b6ec9e7278250be79f7f55e2586955e75ab75fad2055ea72cd44209c41c94ddfb980fe5b007b3e997085bc1fe5b514f72f860c05",
"0x001335698617876fcc272740f765d53d53ee511dc9dc33965aaa0a5584f2f0fc02274c435ba9cc0fd5b897350de8cc1837d3a2baaa54ef3f9c66f689f20eddaf1a",
"0x0010f766b8dbe13e3f27f45da3ad7e5c31fd1c11c51f4f892851519182cdc9348921c10d83a16e057f99623dcd68ab28a78e48b655df756245631521d04e85e583",
"0x002bb5fce9df47073438d61ee438d900ab4ab01ac7f053e57c6ffe3e8f1746285016a600e6b7ee90281bbc3bd9b9523a663261cda2208ae98efcf76df8c965fb76",
"0x002cad2eb5194b59d880565b03cd667a842923c1310a60bd818685c8fe4120d86817ee8bfffdb490f78f23d6fb38bb1c27f10f877c5017b8b2c21ad14f23df0eab",
"0x001f064044ca94d6f30ef93ee1bb6ae35450acf1c8f5b113b0f0ff39e4b65cfb9a25141ae7fc30c69000991e65c626c1b12fb76bca02c68f8116d15698a5934b71",
"0x0014382fa3481f424cc33c39f77fd3df54c5951be347c629ab5baec238e46cab050b2b8bec8ebdbc97dd6c0ab867aae5746e51b69b7b8177c96dbc0c4531521d3e",
"0x0011941db7a46d1a3ddbd27a4a57a0ce1865b6e224552b233d9d545745489257f408c8e3a0a147e117dbb89827018a2df52d124cee29e82b15643e4877cabe4d06",
"0x0000d7b8f99e5f148297bf4bf7e5133f87dbdf1932dbb152e0cb14c472c7d26f26146c4f72b903bb98b0855c1ca5bef4bada14a773dcda341d10402004e999d757",
"0x0104eeb1fce36df4d3f6423137af3855d16bc936184295529c58682bb5217d64d905080000000000000000000000000000000000000000000000000867000000000000000130644e72e131a029b85045b68181585d2833e84879b96ea2850beb8e012d423615fd9926356a5b1f3a4599c7cccd6df3b45097b6527756e572b90fc8c40496f831f2125c021fb94759cb1993a2f07eae01792311e13f209441ff8969cf1eb8351cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c220b75d7e84517e1504c151b270255b087fd746d34c000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
storageProof: [
"0x09240ea2601c34d792a0a5a8a84d8e501cfdfdf2c10ef13ea560acac58661882dd1b3644d1d4f3e32fc78498a7ebeffac8c6a494ac6f36923ef1be476444c0d564",
"0x092fa31ba6c9b8f291512a582ab446daf7aa3787e68f9628d08ec0db329027d9001af83d361b481ed4b943d988cb0191c350b8efc85cfceba74afb60783488d441",
"0x092c2ec2d967208cb5088400d826b52113d606435be011b6c9f721f293fb12242515681c9016eb1c222dcdbeeeb9fd3a504caba892f4c1832741a2b17a7305598a",
"0x090c7fe825c29bf5df80c7101ff8a372ba4f7b2ac37c16a3bbda38cc1e38e682460499b7e5d21d3784f496e747140f465eb1a39a019d2be8baf13a5e39f359a4ed",
"0x092bb11ebbc7cd1e565b86498aecab16842ab3fa852c7943cfbc49ee4bc593b2f308a78e1bc555e07d36d5c812af57c18f67199197a52ff74bc4e32ca6b7fadf32",
"0x092fd1e042080801034c6d6c79d462016c74b97dfbb1272cf606e638911a08f21c02434541eeed6d66002c69042f9354211e40518316a2d98cc0da0f19fb1ea013",
"0x09024bd491ec707bc3e8bea6b2754f37b1e85903061aefabd945537eef2f4d38b4136b925b004d29603c5e6195e073322d27f0c6ea3fa1ea5c5b248ff60dda594c",
"0x09269e1f468bd9bbde77a13562645a80a77d26d801781ca95d385bd59ee1b0890b03694bf9043190620265bf0bc3baa4d82cc82302ae0bbf33cfa48b0ec9d5ab25",
"0x0924d8bf62b2a725684847208dc021d5aee9f3c8f14c14786bc9f93232dfd3e068120bb7d022bbb159b4b84bb9e36cd2fcd89d761e265c1b88c8bdb9745a51cb22",
"0x092680f932920fd86de0b417cfdbeb2836a470213097ed5abb1a2b4deba8437f6825fd0ec614b97e6cfa4d50b08ad1e0fd8a5cd72db3a468128d1045d6a54e5e6e",
"0x0909e630914cee4db538057a0218a72288b88b2603aee0f805254b865a03de87c92ce46c1aa77ee8c42bb60c4175826f4dbb89d6282c01ff3de654c961599e66c3",
"0x091a17302d53ad1b7a4472d111fd27b35720d49ce27259b5e42f46339dddf235e82b973c29f44cf69b589f724d7d2fa54bf38b37bde3fc66c0d965a8c10df80caa",
"0x0916572156ae22ae2b0bc84ff41d16668be7163da26db2b13b86c218e0516c97a4131b584b7192464dde26060f66f678b03c8db8f64f1cd7a1f98a22a90cce5850",
"0x092c6ee2ca598c123445bbbd403ca3ab8a95ce2443f941ebdcf7bb035e2a3e38e22e8d5b222a1019b126f0ecf277c7fed881413e879cd4dc5df66634b6e9fb688d",
"0x0700000000000000000000000000000000000000000000000000000000000000002822301c27c0bd26a8f361545a09d509a2feed981accf780de30244f0300321d",
"0x05",
"0x000c180cb3d57f72eb405dfc667d167967e4709cf3722a87b4c924f78a1d8fa9e926d16eb1f4902f8ac7a48fdf98274c9c4061f9f14f783e2fb41ef50c53d5f8ad",
"0x000f78c968ee196c478c91d12a48edfde6c630d40203652c6420ff5aa3619549a4297615606d62866169d509f77c9cb38751ae282cafdc27caf891585b383b4795",
"0x000798716960783afdcfd0749aa3b316d6e3d6ec2724853e629b42b5a9a10208e02e5f5fe3d5b8b823d3481aa1e738a1a24d6d1a63116e0003044672d73a7df2e4",
"0x0014748f61c4954d239225204b4611d66384f08ef03db3da82957fd590ee00b6c92b873e4bd217f8dfb0fa29bca1087ac7bc29db616a6830ba456091bab772ac06",
"0x000a1c900952239e98f5f1a3009e623bf6cf533d3b0d6d13d28d04f0496761927c0be199ff86f081ebb1c413e850450a4cce01dfd2c455156d7abde31385ae2ab8",
"0x00028d4e89bc6ce55b5e6bba0f2f3758dafcdb4722e6c1a06f6faa8bae065bc8ae0644641c0ac696c265b3ec90889e3842c9a7a5902f1a5e807c5767ed49106982",
"0x001e8434bf68ee6077d88efb5449ad286455a522e63a6bce5544cf785b77a5842d041a4e324bc47aa8ae42b56446f687758a8091986b6d760fd283a9e097a64e3a",
"0x00250bc6ba916a2acb3ce53053a88be40b815fa749d144dc709a7a46a08361e83c05b2b5b05f45324ab921e04ae1278371ebe1e092203259f4e5306eb46ad50f8c",
"0x0011c208e2c536c37674b1ecafff0261146c326c939544781da7062bbd0ac2fbca246f5225dc41e9fc17fe531f5bdc3325620e4003b3310a2cf7e31011b19c68a2",
"0x001dc8d4177945ac89a3c61977ed787e50c9d8a0c5d85dd6b1409ec11213b324e6228005b222573db7882205be776a5bd2183944b6fcf63af604e31b9285bd010e",
"0x0014ba74da33d2ca27e3f78bc1bd052c2b92176ce4136df751a8229051de383c2b0c8994f02704420f1f84963281364401d00f6d5aa9b6f52135bd96159c1c3b9b",
"0x00188c7ee45a6c28fa7ad49a86206b70764066b1888b0de90e4410d7132a641f8b0eecbba072e28ed6705379104e30dd2557c47b30be7dd5e8c893b8a641d02701",
"0x0010fb29a3bb8191eb03bd345ad1995bf6a57f09929f72dc8a9c42435c2eef734b1d565bfc8ae78d6c1496f2bdfeadff6890e8ddef4c6b730a5ec8575344800c90",
"0x001b2abe5a1352c492c3ac47d2ff93896977a99a0783eedadc6246efc9b4e78ab408291f4e9234e4662a365f40090e1b323e3448fa2f6cdc9c929477095499c323",
"0x00083b5711eb1cbba5e79c53227057d4987a22dd22b5ef715bf21f558917f48b17027f174fd4ca77e412ca65a7fbf6151e4473fa909ea384c7687b45f860d0103a",
"0x00100158ee54f61ba5b093a43a348cfd202c87ba1533af2b24fc2f068de89a8d15100f3cc72c206d05d44db4272bd67db89bc6e5c86d7c1b03b40395ec4661595c",
"0x002a15c17fcf2a10c6d1bcbd59ae262f80ad33518d499059a668e115045069ef012788a404ba41b5f8a96f0b294d0ba91e65b1bf58eee74adb8e55ca12f22fdccc",
"0x00031177585837e616bc830056a4bd12821c9c779096df361ebe1d77379e96ff9e0000000000000000000000000000000000000000000000000000000000000000",
"0x02",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
},
{
// curl -H "content-type: application/json" -X POST --data '{"id":0,"jsonrpc":"2.0","method":"eth_getProof","params":["0x5300000000000000000000000000000000000044", ["0x0000000000000000000000000000000000000000000000000000000000000000"], "0x1111ad"]}' https://rpc.scroll.io
block: 1154766,
desc: "random empty storage in some contract",
account: "0x226D078166C78e00ce5E97d8f18CDc408512bb0F",
storage: "0x0000000000000000000000000000000000000000000000000000000000000001",
expectedRoot: "0x1e5cf13822e052084c315e944ca84f1ef375583e85e1508055123a182e415fab",
expectedValue: "0x0000000000000000000000000000000000000000000000000000000000000000",
accountProof: [
"0x09062c633f6d7c7a157025fef8ab1c313a7caadda3a64b23664741f9de3b0478fe27571cf9b45d5f4deddf5f0b5354a613998fdcbe9249bb7cde92fd45513c5a99",
"0x0920d6877efe14060018278754e91682430401880981fec1cd1b63610bed0c1e332a63aca7a8898b01983e2c53a7257310318da444fd6c8b705e488943205301a8",
"0x090f6dadd53bbc0f5fa4fa03961aff0bf252ae335e11c1836253b6bc214d66759010b10d80991219a66f1eb7e07169b4cec4fa74b04edbdc08c3f238dfdf1d2fac",
"0x0921ea10af71b5f3587ff9d42178a151427cbcde37b8bee6575463bf6b83110cca0520d5f97b44e7015453ec16d9c28980d2cec3df5c860eb8a455f49dcfa339be",
"0x092d19cf96a7c129aac6f72f780703a9ef3233fc5124d592baee751a3550dd692a02c962b87efbba5aeea4856c3df29c1ea540e1fbc7a74529d5dc793fe8e490d8",
"0x0922e20a087e600560007189ccc1a159e4fffeb1876a6de3772b7f450793a1c6620ada74791f3ecd25a650701578ef9661c64e75d836c681503e96228974a53903",
"0x0924839671b636ebb56cb9a2860a3edf2a2875774e84dfcf8546135189f808d724260ac8be541ff088a9a1d2468c4c6e2faa793009be553a3cbca003649ee511db",
"0x090cd8140d844f62e44ffe820c1b2b0d4aa8f0518c15ff61759d93d805cb017cb628d5b46a4c4ec0a10eb00155808890925050f7af2279b512c25005d963283262",
"0x0913c0698673b0be011485eba05c61ac41bf14fc960ce5dbb6f5a021809eabbb0e18adaf85a3724e1a644268b845f5014b39e574928b9a01bfcd25d6fe1cf03e8f",
"0x0912c2e7da4b091c52e0012e5c13baf07d9d9daed10a558262d2e700a7c823300e054dce1849561bbeede4368a3be06f5a2bae06bdb1bc2bcefdba84634fd1991c",
"0x090b3e9c665497a0f9c1d3f1448c6d9144a287eb0accf86fea6f443f51986df7130392814f078a19643081787478ec3a010e2757a574877a194136c529813cf7ae",
"0x09249a0e273abe79a0b99a55516e19213191b7f77ef34f8815edc4e1ede8711f7920615adbac1983d844c8a6ed50922562432c13d030069d8b3e92611b4fe39531",
"0x09199575893e55d92fafb3b067130b9b6b5a46e7f6fb2d0af412d12591632dfe961adffb9dd1e7490095aac94bc1fcaeb591f4ba907fe2b882c9f6d8f7ab3a1809",
"0x09259308e9398f029ebbe31a4b353f474622b4c96995b7365c3b13c392fcc3e7001be60286a497a3886aa9cff3ad6a5dc71504078eb7a44c43530b7b33eef4743f",
"0x090709a21aaf18a1eaea3b925ab36f47a82095aa3e9ddbc4f01463005c4b64f6af0554d854637fcbfd9b1a4c2474de343950569e4f855d66f2ee14fcfb19ee17f5",
"0x092d7319be75a70b8ea5f0acc6ab4a96971ec546f72b18bdc3e905ad6ea8a288f70626499aee389335559b1dd3cc8b6711f9fde0c517236190cba24fa87993877a",
"0x09081b165a51e3081fc2e3e27d6fdb81134b65284851798de62899db3065a8c1fc040c8dce92508a510c2c34fc2949910dd41247c9f247cd216c03d9bb9d2881b4",
"0x092a27c5be32e1ab6e85d1ac094bc1509d92285f45c63fca6dba9b14d485a94af326d44c1ff85666a4790182ddd7e51cbbe06af81d62082e6d79faec29a4501369",
"0x091a46df6ffd6b439ffcd1b57e9548f5c4db26ade9e984efc8a91a01ab22134d3c1617b504ac2015793c5dac16d379b5ca6cb70c14243491bb68535ee686a3a553",
"0x08180e90f9f9a4fd8065a5849539793bd9e9340b69770eff1716a733241e454c341641f913f1c32e2c652b876f902e5c2c8d51c482411ec44dae969bdc50264c42",
"0x06273c162ecb059cd86ec0a01033dd61c39f59ee0a13eb41a28c0b2d49a45f6f94081be344adea9f54587a832b9efef6fc9ec010d86ec5fb2b53b5ff8dbabc4924",
"0x040b792f5b15327fc37390341af919c991641846d380397e4c73cbb1298921a546050800000000000000000000000000000000000000000000000000fb0000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000be74cc05824041ef286fd08582cdfacec7784a35af72f937acf64ade5073da10889249d61c3649abf8749bf686a73f708d67726fada3e071b03d4541da9156b20226d078166c78e00ce5e97d8f18cdc408512bb0f000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
storageProof: [
"0x05",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
},
{
// curl -H "content-type: application/json" -X POST --data '{"id":0,"jsonrpc":"2.0","method":"eth_getProof","params":["0xC73BfBD94fb1FD860997D4E76D116BDE0333BeEf", ["0x0000000000000000000000000000000000000000000000000000000000000000"], "0x2a7531"]}' https://sepolia-rpc.scroll.io
block: 2782513,
desc: "contract with only one storage entry",
account: "0xC73BfBD94fb1FD860997D4E76D116BDE0333BeEf",
block: 95216,
desc: "contract with no storage",
account: "0x9c0fc47d9346e2be1e24f6cef76149779fe52715",
storage: "0x0000000000000000000000000000000000000000000000000000000000000000",
expectedRoot: "0x13c6008daf17807163a056504e562d4adf13870306814b1a3877cda5297d5ae9",
expectedValue: "0x000000000000000000000000000000000000000000000000000000000000000c",
expectedRoot: "0x2dc794537b959b575dc216cd11389d802f9389fce7183278561a824aa8e950e2",
expectedValue: "0x0000000000000000000000000000000000000000000000000000000000000000",
accountProof: [
"0x09272d92cb48d19e41ef64be1da3e10026eb87d227132becb4fba0dd1451783de425f66c55ff0bec0b012e11d64aaaa6c322566d58cf45525cb05302132518f23d",
"0x0920908000907fe2260e41f8682510eee0572937459163ea1940c2eae8b2d5862e015e7c84f56f5948bfc9c242506d14f5c3c1b97bba1b262b40b108f5d7e69287",
"0x09078402c38a02e2b3dda819b761ca6448029f3dd42ae7876ac0dba0d762e3ddb818d80485f0a15f54f110aad9a98b00bdf9ccb56bbcb069552c7f6c10be1b9c15",
"0x09123243fe438606648fe3bcef5eb0165920315fb2b9316ce3ec0daac885577f190b84d9901fc150f52ed177f23ec31de4254b293c6eac2088009f3e13e3a08b78",
"0x09053c59663d3eafad212f58c4834090db4bfd0ba2b13d3108e0acade089a5da9229a75e0b30abc41d4fb252faf9f3aa8ef750b780247d83186cdc333635c25038",
"0x09163255ef0b1fdec7ec97c4e002cdeb6c963ca26d9d03ebdf78eb44dfdb57e4bd1fa9f68cc583c1e7019cc62133ede53e5636330de9a2c09e75f03760026e3729",
"0x09296d3cb1c4fd539ed015f2853649d20f5db111ce13c30b7e6efa4c9468741d1e0eea62adcf73aa5bdb4868cd776df429d26787f424beeda38f4ad19aa83e43e4",
"0x0908288df27fa423895de38ec5a52e809d99b683c5b32463501f5dad642b71387f0a3d37ae9df53b5cfdda0ac67765662e8a71a19b05d38f4a464596e129a35570",
"0x091a774fef4e8294fcca57d213846e51bfcf71249680e937e14248e532b47abd762ad72878f07f4abbba8bd13da9b75f681f35a748bb8fc078913e16a91bce783e",
"0x092799a146ba6b2bf4b6a24aef88c9590d9643d53f429438e348518a17af3d6e8d10e3b39898c3795c9386518438465581ca232445532fb549a8bddbdd6f4e0eed",
"0x0914c654d53c9f8656b784709decbd12ba786800a77c929f3b4255d59138b42dff282005f8997b73d64eeb112775885c4c08d2ee4e356cc2db58154dde48a0a1e4",
"0x091c71601a71f28ed0f6aeb59cf8a7bf29ce7dd3203352099920086e02007496260b811e85a0cd244d56f199b357d5c3a54f897fea21637698943679d07b33db8d",
"0x092a66de31cef7b4b195772a2b96edba3ca7d97a5bbe974d071c37f0d0ca0545be0be9ca0dd4c9d62ec3ba0a404713fefe6b62391ba3f6d283a47e83fdb18c3a4e",
"0x09093842042d196ae30784d31ed1526dd5d60cabe292eb5333e42936a2edbbaf1d237998efa424724063547c02cfa835ebfc24131315c34229b363f46fefda33ee",
"0x0911637da97122f89f421a4564d5328893ff4b5de123aecad06e07ea45d9622b87096a296e974b5eda0f2d35cb5531c4a55f3c1e181a8bb4a0b33399e7c93853d4",
"0x0921feeaba62a4ad78791d662737a3fa52a527dcd892f5d3af2cfbed4b6591d50f2fae639afb8ab4640a2d166616a4803442b26b9a8f5148a1c88adda1e2d911da",
"0x090ddbe424e9368f262ef80a144383fc4f518b27200f7a61a996a075b3b84ab5041c755907f230eea27d060fa827a5743c7046cd0dc7487047bc3a7d222d65d2d7",
"0x092d6e65349fd6751353b4b72fdd03d3ee4b1721efb434db76679c1c348b60fdc0177c7d961201138c98f85daf8a49b7a083a41e77dcd819d359b3db55c4a941a9",
"0x090b0d48518cb602b73a86bd7b2294d401e6ad4851e3c7549fc7d23eea017eadd72e3245236b50c7f256de16bae063df6221b8331443c9d3a79e867fd77dd85cee",
"0x07062bf32f202ec2afa65dfa84fffc76b5c05309768078544920f9b56d021606ce0b7371683425d088ad37f063ee847a9accac416314f1308cce69a8beeb2d2ab7",
"0x090ffc989b8556e69159e246cb74cf7a2e30df63e9b7dba76ede73996ab60d9799063ca19e1d436cea189d17c5d93b8da0fa11b3ee88de1030602d1e8087cbb3da",
"0x070000000000000000000000000000000000000000000000000000000000000000084f906a52b7da7bf35f3cc2431b40cfb90884c2ec0b579c9c096aea959509f7",
"0x0620b6c0072d699768c0b52df46b97dae979a14788ed54dad1d7ce67db6e036a07291784b726760c2d728e4084d95df6d1534e27284c8ae2eeb56a80210f37da2b",
"0x041245637ec55bae3c02f990e3cc3bf59cc05f515731cfa59ee55f8164953f8965050800000000000000000000000000000000000000000000000000ac000000000000000100000000000000000000000000000000000000000000000000000000000000000f68a43f5508e9c1f845406d9a507b612f97530746e59b93c8705f1a7cb0b93451e52f95aea13b1bc1f37dfbf797bfe7cea82a8c82da148f507e1ef2036fea8314b9fb07c4311e129d72b858c37b6bbe09c616f78416cb53d6e83360aff7b99c20c73bfbd94fb1fd860997d4e76d116bde0333beef000000000000000000000000",
"0x001988ce414103e97cb80613adde47d5fe0611b30087d1cfcdb84284c42907467e24ac744be2edcb86cdfc42a9bbb7b2a270649161c3ce3a41d3ad5a26927d2c79",
"0x0007af09eec4d7cc8903e99bd9fb9b8e57c30b0c3e34b17da769b01a9a1b943f391c4537228dbbfd7e7cce02123416bfdd61fb83577725516123b569eafcd8087d",
"0x0013a22efa6a843f6de1925fce2f6a83c7ed307182b16f661d0e7a8046561999393050830a440d2506adf42ccedece4e3aadc6bc80cea20fc1d8ed9e9c61597da0",
"0x001056a19427eac81b91de5db696812b3a0384bf41b37a12e9cbb7dc62404a102a1465c13c8d3721e137a64d9e5ba1267ac418339b3648bfab5a2a86f2343c2b4d",
"0x000794d2c0e19bc86772c2d1a43d46de87ad221847bddcfdffa19dbd34f3c3a9b507c5f198eb63c18640af5eff5480830147639cec070d276b778f21677d22ce32",
"0x000b23d93f98ec6e3536ffcab6afc6e2eb9b73aeb573d288723350366c05469e2e23837ffea9235351ee533af680d14011825e098f81ce3f8f59e9f08deff05e3d",
"0x002ad200ac8be8275ef12b8aeaec526d2b5255128968a2cd2ff775cab14e2ec4e907f2e9b849239e0e94332a50ac9d97320c56ca718c5e023cacd69f80b4c97c86",
"0x00284be135a2d7f5822a7949189b90696df39b1b183206c764576bf457df4fd1560204a9fc6c0dc199eecb404acfcabf4a633916fc94d2790dcd34959809c2195d",
"0x00270c2cd154aea3b575a1c7d47c62576bbdce6bbc7ccf5682e7962cf6cb77f0d317fdbac10917644860584c3057c750df695f529189f90910c30f114257719990",
"0x00174956df87889921e2a6ddb257fa84508fd7ea22c5a622b84378678e781a2289053dc6b3c4f91335b64f4b170bfe70bb5e2e316227b329d2b1205e7c62c4f755",
"0x002f9284ded18b8f281841094a93cb55b95884eec55d8eaa759c6175ddb2e037111c63bcee8ccf544fff55c3e502270e574d1f0b6265c4c7c6f42db5061b0120db",
"0x00065fdf05e66407d26a36a49d042c9c5e8cebab3baa2d3fd1ae6e673c3636cf7e2d9dbf3781e3f26f06fb503638a8bf00882f58dc83500338df4b7e08a290a5fb",
"0x00138987046c770f02f5d8e7d073f6c055536450fa55ccd2a23957598b6070297926f3a0b645072c5bd5c15cdcf03a4474e94d760e3a76fb8714b20b9d74608823",
"0x00280e7f8e278e02e43843aaba5a9a722a89af0ece06b5892284f825974e1c1984185be1fda9b5322a4c41023127eee438849ea23390e6c2d4d9abdedb5a1a43fc",
"0x00208f32072c6e20863710406ad34339da1124c639941e935818dd9ad9419849c91e0e37873df7eb190a2846789df889bbfd522200e2a41423ff9ab0acf2592be0",
"0x0005fb23491fabbc9b3eead71117b86a27952e8fd4b3380336ac3f479832e94bad109a1b6dca757696b8831d2529ffda29f37af36f92fec738376df77561491083",
"0x0028baee42b4a9a70b7ec1e50ea1a6817f812082a28598dca106aaecf2761fb63c06e5b589490c27f5cfc233890456ec47a7365ff2882a27c73968f4829d011b05",
"0x001708247f7a96b84cad27c31985cd39b6cc9435b4ec3f4db9aeed3311c213de651e2f271ae0fa011e5e6fccd121492400327efb915c95d85956a9cd27ceb4321a",
"0x0000000000000000000000000000000000000000000000000000000000000000002332e856217b3bab09901f1daa9ddc91edf56964e03675d260d00ffdf6e2e715",
"0x000571dce6fee951f457db89bae18abbd78b6b06504602a103133d2a38cabf5f5b1ecb13b03e3493e217c65da70baf4c4fad74808110658924869ba0e75d0871db",
"0x001738a6461148300d30699edb55d3b5bb62760aeb9384c07d61aa062c401f3a7d0000000000000000000000000000000000000000000000000000000000000000",
"0x000c14152707412177bbe1cfed882d7d7bdfca4e96be701a3c41bb3d254491f0bf0096ebc25015b9a40d4fe7490bda8ecb7f3a01e858d7833dce8f1993be4db07d",
"0x0117294cb69b0984b3a26d77eae252f9d8e438808bf276ee8c0c0546b7316c9bca05080000000000000000000000000000000000000000000000000ab1000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ed3aa0dd2cd363d4cea5d5283ec359f75be36a12ceddc7f80a58af9d39a418a02b6a0ff9eb34bf0e52f67047f95556a96c4f40822412da0c8bd0340996a754f4209c0fc47d9346e2be1e24f6cef76149779fe52715000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
storageProof: [
"0x041d3c5f8c36e5da873d45bfa1d2399a572ac77493ec089cbf88a37b9e9442842201010000000000000000000000000000000000000000000000000000000000000000000c200000000000000000000000000000000000000000000000000000000000000000",
"0x02",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
},
{
block: 95216,
desc: "EOA with balance",
account: "0x0384a6f7e2588bb251688f9ab8d10932a98e9f28",
storage: "0x0000000000000000000000000000000000000000000000000000000000000000",
expectedRoot: "0x2dc794537b959b575dc216cd11389d802f9389fce7183278561a824aa8e950e2",
expectedValue: "0x0000000000000000000000000000000000000000000000000000000000000000",
accountProof: [
"0x001988ce414103e97cb80613adde47d5fe0611b30087d1cfcdb84284c42907467e24ac744be2edcb86cdfc42a9bbb7b2a270649161c3ce3a41d3ad5a26927d2c79",
"0x0028db7c407cab6652f1f194401bd87bda33c9a1723b4f93515bd5929cad02668123fa5a3e69136c8e03a62c805f89c9d3578a6f5fac4bb281fc4d7df12fbcc5db",
"0x0006801926f00b574e3a88162d192482fecba9918b77e133dd77587d9efaf5c7861712d244ac8ad4bc0bffe0dbe8ab261865c9a69b4b7769e9c188ec048460ce78",
"0x002f3161746c2c70c7cefb74c07bc16b28bd9011343f5c6f8756471cd0b184601a25d05d5447a572452964b3c20f40ef841bf313c958e82a6923584e20496df67f",
"0x0007602275f17f6c339ec3febc879c2ca72efa782ff1888b04553f82333eb0e60c068c8e4fe6da32f7f80a4acb50b690a7204581e5e4b8e9e7daa115dfcb466ae1",
"0x000cb512d4ab158b5e7d5852cc7531788f11e64e5959cc1233d7a64eaaca36426116fea9120cf06c241843db50d81978b402281dfe15ba7d8a8c689bfbe0b31a1a",
"0x002eb4fff0642f7be6d8e95793d9371d606df48efd0b62a7eb01b0a9669307be2b0ee7d01463afc3dac441f66e675ba06fec67b692e3f7a46510d096836468a3cb",
"0x0003ea09dc5b0ca3ce2961d3200c09b837ea535447e3ba45e5583dbb4e9db48b2208abfec237c907584104b11444f55fa3fa7e6f6a5954817ecea6361516f0271b",
"0x001c654478a700ac0414f5cd8da557e04f9570939802c3963e801523f001ebb4d916d301b50f89760520da2a662b03a207e9372902153ba84ef0f5438472f466c6",
"0x0009f3b0d95ec5d88cfc2db19520f43d110d12c757a58ae7f578095de96e5d319d2c8f43a67b0c01008670f07eb53071b835f19cbb45d6e76281a083087217d988",
"0x000348f024d617f64de7be803547c109b98f833b090e8a3dea0c2bed201ce752c12a4fb71f098941741c42e156651d8a42632e3acbf6f14cd9763b50216af75d61",
"0x0029f85b49319fe7dfced69a258b1baf213d638fe3082b9a13f38e553e9d3269333054c4cb6d1e91bc2dfced1559b58cd6474ac6583a1fc5a2bef5eaa7b96ecea0",
"0x000a4d19e2ec5f98d9ccdc1e94d9334668b87ea451195f9a8319b98cfdb077c5ce1adc64852505188363c7e98b83501e876862d8ffbd8b4051f3cb6dde7f0e8afe",
"0x002568d5d87f19b2b3f2b7341ee61fb45f56dc76734beaa4f1a9865b80b9d9a7d500a191ba054a28841f25c34ad384817a2af2ebada6047517dbb2b6a1338e48c7",
"0x0027f6df1a3610c7447efd280fa6a949713456a1ba79b50dc7fb87c5cb3312b19311b3c9c4420874b02bdc1ea102dc77bb803c1a5042d565aea99054ae0eb816b2",
"0x0018a3d33e2c0d076ca4ddb093516d90cb8ba8b508e8d372d3a8a93aa9eef6079b138df6cb61c8f92dcbea8cd90ead1efa49f3a24f814c88a7bdca8fd83f4d0675",
"0x00268f3122e558d5084a1b3ffc293b67bd2436152fbee80566226d4a753b5b44c40b6d06e2f5f17009a7e146889c2f492b077a462d602e0e72f53373a154aa450e",
"0x0006c81bc9375fe1a0ebb75b151c8a321b85970c1a8a5aa7396a7076a4d6f26c8118a7e9e0987d7c6d0100180c9ba496db2b967f6acf7bc11d002314693416b3bf",
"0x011fb221b659992b8d98a645cb37666f934ded70f1f5d82dad67dace71d7191f8105080000000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000d8d6f2b3da41cda2e0000000000000000000000000000000000000000000000000000000000000000c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a4702098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864200384a6f7e2588bb251688f9ab8d10932a98e9f28000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
storageProof: [
"0x02",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
],
},
@@ -272,9 +195,13 @@ describe("ZkTrieVerifier", async () => {
beforeEach(async () => {
const [deployer] = await ethers.getSigners();
const PoseidonHashWithDomainFactory = new ethers.ContractFactory(generateABI(2), createCode(2), deployer);
const Poseidon2Elements = new ethers.ContractFactory(
poseidonUnit.generateABI(2),
poseidonUnit.createCode(2),
deployer
);
const poseidon = await PoseidonHashWithDomainFactory.deploy();
const poseidon = await Poseidon2Elements.deploy();
await poseidon.deployed();
const MockZkTrieVerifier = await ethers.getContractFactory("MockZkTrieVerifier", deployer);
@@ -282,17 +209,6 @@ describe("ZkTrieVerifier", async () => {
await verifier.deployed();
});
const shouldRevert = async (test: ITestConfig, reason: string, extra?: string) => {
const proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
extra || "0x",
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).to.revertedWith(reason);
};
for (const test of testcases) {
it(`should succeed for block[${test.block}] desc[${test.desc}] account[${test.account}] storage[${test.storage}]`, async () => {
const proof = concat([
@@ -308,277 +224,193 @@ describe("ZkTrieVerifier", async () => {
});
}
it("should revert, when InvalidBranchNodeType", async () => {
it("should revert, when parent node invalid", async () => {
const test = testcases[0];
for (const i of [0, 1, test.accountProof.length - 3]) {
const correct = test.accountProof[i];
const prefix = correct.slice(0, 4);
for (let b = 0; b < 16; ++b) {
if (b >= 6 && b < 10) continue;
test.accountProof[i] = test.accountProof[i].replace(prefix, "0x" + chars[b >> 4] + chars[b % 16]);
await shouldRevert(test, "InvalidBranchNodeType");
test.accountProof[i] = correct;
}
}
test.accountProof[0] =
"0x010a52b818e0a009930d62c17f2b1244179b7c14f8e1ae317fb3bfd3a3ba6060031b2a4aa2df31e79f926474987eea69aab84f4581cfd61b0338438110f6be145b";
const proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Invalid parent node");
for (const i of [0, 1, test.storageProof.length - 3]) {
const correct = test.storageProof[i];
const prefix = correct.slice(0, 4);
for (let b = 0; b < 16; ++b) {
if (b >= 6 && b < 10) continue;
test.storageProof[i] = test.storageProof[i].replace(prefix, "0x" + chars[b >> 4] + chars[b % 16]);
await shouldRevert(test, "InvalidBranchNodeType");
test.storageProof[i] = correct;
}
}
test.accountProof[0] =
"0x000a52b818e0a009930d62c17f2b1244179b7c14f8e1ae317fb3bfd3a3ba6060031b2a4aa2df31e79f926474987eea69aab84f4581cfd61b0338438110f6be145b";
test.storageProof[0] =
"0x010a52b818e0a009930d62c17f2b1244179b7c14f8e1ae317fb3bfd3a3ba6060031b2a4aa2df31e79f926474987eea69aab84f4581cfd61b0338438110f6be145b";
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Invalid parent node");
});
it("should revert, when BranchHashMismatch", async () => {
it("should revert, when hash mismatch", async () => {
const test = testcases[0];
for (const i of [1, 2, test.accountProof.length - 3]) {
const correct = test.accountProof[i];
for (const p of [40, 98]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.accountProof[i] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "BranchHashMismatch");
test.accountProof[i] = correct;
}
}
}
for (const i of [1, 2, test.storageProof.length - 3]) {
const correct = test.storageProof[i];
for (const p of [40, 98]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.storageProof[i] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "BranchHashMismatch");
test.storageProof[i] = correct;
}
}
}
test.accountProof[1] =
"0x0028db7c407cab6652f1f194401bd87bda33c9a1723b4f93515bd5929cad02668123fa5a3e69136c8e03a62c805f89c9d3578a6f5fac4bb281fc4d7df12fbcc5dc";
const proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Hash mismatch");
});
it("should revert, when InvalidAccountLeafNodeType", async () => {
it("should revert, when invalid proof magic bytes", async () => {
const test = testcases[0];
const index = test.accountProof.length - 2;
const correct = test.accountProof[index];
const prefix = correct.slice(0, 4);
for (let b = 0; b < 20; ++b) {
if (b === 4 || b === 5) continue;
test.accountProof[index] = test.accountProof[index].replace(prefix, "0x" + chars[b >> 4] + chars[b % 16]);
await shouldRevert(test, "InvalidAccountLeafNodeType");
test.accountProof[index] = correct;
}
test.accountProof[17] =
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704448";
const proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Invalid ProofMagicBytes");
});
it("should revert, when AccountKeyMismatch", async () => {
it("should revert, when invalid leaf node in account proof", async () => {
const test = testcases[0];
const index = test.accountProof.length - 2;
const correct = test.accountProof[index];
for (const p of [4, 10]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.accountProof[index] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "AccountKeyMismatch");
test.accountProof[index] = correct;
}
}
// Invalid leaf node in account proof
test.accountProof[16] =
"0x000aef26efde9e4bca477d460482bce3de3577f6e9a280dea6d3f9985b4151deab0508000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000013328350573dd32b38291529042b30b83bf20bfc7e18ab6a9755e2ea692d5a7644f896b0d629cf9740d72ccbc90dd6141deb3fab132f1ebc17ab963c612c7123d5a524d0158cc8291b081281272d79459760d885ea652024615d55b114b5872571b21aee99977b8681205300000000000000000000000000000000000004000000000000000000000000";
let proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Invalid leaf node");
// Node key mismatch in account proof
test.accountProof[16] =
"0x010aef16efde9e4bca477d460482bce3de3577f6e9a280dea6d3f9985b4151deab0508000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000013328350573dd32b38291529042b30b83bf20bfc7e18ab6a9755e2ea692d5a7644f896b0d629cf9740d72ccbc90dd6141deb3fab132f1ebc17ab963c612c7123d5a524d0158cc8291b081281272d79459760d885ea652024615d55b114b5872571b21aee99977b8681205300000000000000000000000000000000000004000000000000000000000000";
proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Node key mismatch");
// Invalid leaf node hash in account proof
test.accountProof[16] =
"0x010aef26efde9e4bca477d460482bce3de3577f6e9a280dea6d3f9985b4151deab0508000000000000000000000000000000000000000000000000071e0000000000000000000000000000000000000000000000000000000000000013328350573dd32b38291529042b30b83bf20bfc7e18ab6a9755e2ea692d5a7644f896b0d629cf9740d72ccbc90dd6141deb3fab132f1ebc17ab963c612c7123d5a524d0158cc8291b081281272d79459760d885ea652024615d55b114b5872571b21aee99977b8681205300000000000000000000000000000000000004000000000000000000000000";
proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Invalid leaf node hash");
// Invalid KeyPreimage length in account proof
test.accountProof[16] =
"0x010aef26efde9e4bca477d460482bce3de3577f6e9a280dea6d3f9985b4151deab0508000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000013328350573dd32b38291529042b30b83bf20bfc7e18ab6a9755e2ea692d5a7644f896b0d629cf9740d72ccbc90dd6141deb3fab132f1ebc17ab963c612c7123d5a524d0158cc8291b081281272d79459760d885ea652024615d55b114b5872571b21aee99977b8681215300000000000000000000000000000000000004000000000000000000000000";
proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith(
"Invalid KeyPreimage length"
);
// Invalid KeyPreimage in account proof
test.accountProof[16] =
"0x010aef26efde9e4bca477d460482bce3de3577f6e9a280dea6d3f9985b4151deab0508000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000013328350573dd32b38291529042b30b83bf20bfc7e18ab6a9755e2ea692d5a7644f896b0d629cf9740d72ccbc90dd6141deb3fab132f1ebc17ab963c612c7123d5a524d0158cc8291b081281272d79459760d885ea652024615d55b114b5872571b21aee99977b8681205300000000000000000000000000000000000003000000000000000000000000";
proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Invalid KeyPreimage");
});
it("should revert, when InvalidAccountCompressedFlag", async () => {
it("should revert, when storage root mismatch", async () => {
const test = testcases[0];
const index = test.accountProof.length - 2;
const correct = test.accountProof[index];
for (const replaced of ["01080000", "05010000"]) {
test.accountProof[index] = test.accountProof[index].replace("05080000", replaced);
await shouldRevert(test, "InvalidAccountCompressedFlag");
test.accountProof[index] = correct;
}
test.storageProof[0] =
"0x000a52b818e0a009930d62c17f2b1244179b7c14f8e1ae317fb3bfd3a3ba6060031b2a4aa2df31e79f926474987eea69aab84f4581cfd61b0338438110f6be145c";
const proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Storage root mismatch");
});
it("should revert, when InvalidAccountLeafNodeHash", async () => {
it("should revert, when invalid leaf node in storage proof", async () => {
const test = testcases[0];
const index = test.accountProof.length - 2;
const correct = test.accountProof[index];
for (const p of [80, 112, 144, 176, 208]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.accountProof[index] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "InvalidAccountLeafNodeHash");
test.accountProof[index] = correct;
}
}
// Invalid leaf node in account proof
test.storageProof[15] =
"0x0026ae15b478408eb45ea8b6f61aad1345f2b6257efd1acc4a6024b26f664c98240101000000000000000000000000000000000000000000000000000111346048bf18a14a209505174b0709a2a1997fe9797cb89648a93f17ce0096cbc1a6ed52b73170b96a";
let proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Invalid leaf node");
// Node key mismatch in account proof
test.storageProof[15] =
"0x0136ae15b478408eb45ea8b6f61aad1345f2b6257efd1acc4a6024b26f664c98240101000000000000000000000000000000000000000000000000000111346048bf18a14a209505174b0709a2a1997fe9797cb89648a93f17ce0096cbc1a6ed52b73170b96a";
proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Node key mismatch");
// Invalid leaf node hash in account proof
test.storageProof[15] =
"0x0126ae15b478408eb45ea8b6f61aad1345f2b6257efd1acc4a6024b26f664c98240101000000000000000000000000000000000000000000000000000111446048bf18a14a209505174b0709a2a1997fe9797cb89648a93f17ce0096cbc1a6ed52b73170b96a";
proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Invalid leaf node hash");
// Invalid KeyPreimage length in account proof
test.storageProof[15] =
"0x0126ae15b478408eb45ea8b6f61aad1345f2b6257efd1acc4a6024b26f664c98240101000000000000000000000000000000000000000000000000000111346048bf18a14a219505174b0709a2a1997fe9797cb89648a93f17ce0096cbc1a6ed52b73170b96a";
proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith(
"Invalid KeyPreimage length"
);
// Invalid KeyPreimage in account proof
test.storageProof[15] =
"0x0126ae15b478408eb45ea8b6f61aad1345f2b6257efd1acc4a6024b26f664c98240101000000000000000000000000000000000000000000000000000111346048bf18a14a209505174b0709a2a1997fe9797cb89648a93f17ce0096cbc1a6ed52b73170b97a";
proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Invalid KeyPreimage");
});
it("should revert, when InvalidAccountKeyPreimageLength", async () => {
it("should revert, when proof length mismatch", async () => {
const test = testcases[0];
const index = test.accountProof.length - 2;
const correct = test.accountProof[index];
for (const p of [396, 397]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.accountProof[index] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "InvalidAccountKeyPreimageLength");
test.accountProof[index] = correct;
}
}
});
it("should revert, when InvalidAccountKeyPreimage", async () => {
const test = testcases[0];
const index = test.accountProof.length - 2;
const correct = test.accountProof[index];
for (const p of [398, 438]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.accountProof[index] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "InvalidAccountKeyPreimage");
test.accountProof[index] = correct;
}
}
});
it("should revert, when InvalidProofMagicBytes", async () => {
const test = testcases[0];
let index = test.accountProof.length - 1;
let correct = test.accountProof[index];
for (const p of [2, 32, 91]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.accountProof[index] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "InvalidProofMagicBytes");
test.accountProof[index] = correct;
}
}
index = test.storageProof.length - 1;
correct = test.storageProof[index];
for (const p of [2, 32, 91]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.storageProof[index] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "InvalidProofMagicBytes");
test.storageProof[index] = correct;
}
}
});
it("should revert, when InvalidAccountLeafNodeHash", async () => {
const test = testcases[0];
const correct = test.storageProof.slice();
test.storageProof = [
"0x05",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
];
await shouldRevert(test, "InvalidAccountLeafNodeHash");
test.storageProof = correct;
});
it("should revert, when InvalidStorageLeafNodeType", async () => {
const test = testcases[0];
const index = test.storageProof.length - 2;
const correct = test.storageProof[index];
const prefix = correct.slice(0, 4);
for (let b = 0; b < 20; ++b) {
if (b === 4 || b === 5) continue;
test.storageProof[index] = test.storageProof[index].replace(prefix, "0x" + chars[b >> 4] + chars[b % 16]);
await shouldRevert(test, "InvalidStorageLeafNodeType");
test.storageProof[index] = correct;
}
});
it("should revert, when StorageKeyMismatch", async () => {
const test = testcases[0];
const index = test.storageProof.length - 2;
const correct = test.storageProof[index];
for (const p of [4, 10]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.storageProof[index] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "StorageKeyMismatch");
test.storageProof[index] = correct;
}
}
});
it("should revert, when InvalidStorageCompressedFlag", async () => {
const test = testcases[0];
const index = test.storageProof.length - 2;
const correct = test.storageProof[index];
for (const replaced of ["00010000", "01000000"]) {
test.storageProof[index] = test.storageProof[index].replace("01010000", replaced);
await shouldRevert(test, "InvalidStorageCompressedFlag");
test.storageProof[index] = correct;
}
});
it("should revert, when InvalidStorageLeafNodeHash", async () => {
const test = testcases[0];
const index = test.storageProof.length - 2;
const correct = test.storageProof[index];
for (const p of [100, 132]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.storageProof[index] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "InvalidStorageLeafNodeHash");
test.storageProof[index] = correct;
}
}
});
it("should revert, when InvalidStorageKeyPreimageLength", async () => {
const test = testcases[0];
const index = test.storageProof.length - 2;
const correct = test.storageProof[index];
for (const p of [140, 141]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.storageProof[index] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "InvalidStorageKeyPreimageLength");
test.storageProof[index] = correct;
}
}
});
it("should revert, when InvalidStorageKeyPreimage", async () => {
const test = testcases[0];
const index = test.storageProof.length - 2;
const correct = test.storageProof[index];
for (const p of [142, 205]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
if (v === chars[b]) continue;
test.storageProof[index] = correct.slice(0, p) + chars[b] + correct.slice(p + 1);
await shouldRevert(test, "InvalidStorageKeyPreimage");
test.storageProof[index] = correct;
}
}
});
it("should revert, when InvalidStorageEmptyLeafNodeHash", async () => {
const test = testcases[0];
const index = test.storageProof.length - 2;
const correct = test.storageProof[index];
test.storageProof[index] = "0x05";
await shouldRevert(test, "InvalidStorageEmptyLeafNodeHash");
test.storageProof[index] = correct;
});
it("should revert, when ProofLengthMismatch", async () => {
const test = testcases[0];
await shouldRevert(test, "ProofLengthMismatch", "0x0000");
const proof = concat([
`0x${test.accountProof.length.toString(16).padStart(2, "0")}`,
...test.accountProof,
`0x${test.storageProof.length.toString(16).padStart(2, "0")}`,
...test.storageProof,
"0x00",
]);
await expect(verifier.verifyZkTrieProof(test.account, test.storage, proof)).revertedWith("Proof length mismatch");
});
});

Binary file not shown.

Binary file not shown.

File diff suppressed because it is too large Load Diff

View File

@@ -12,7 +12,6 @@
"lint:ts": "./node_modules/.bin/prettier --write 'integration-test/**/*.ts' 'scripts/**/*.ts' *.ts",
"lint": "yarn lint:ts && yarn lint:sol",
"coverage": "hardhat coverage",
"coverage:forge": "forge coverage",
"prepare": "cd .. && husky install contracts/.husky"
},
"devDependencies": {

View File

@@ -33,7 +33,7 @@ env CONTRACT_NAME=L2ERC1155Gateway npx hardhat run --network $layer2 scripts/dep
env CONTRACT_NAME=L2ETHGateway npx hardhat run --network $layer2 scripts/deploy_proxy_contract.ts
env CONTRACT_NAME=L2WETHGateway npx hardhat run --network $layer2 scripts/deploy_proxy_contract.ts
# initialize contracts in layer 1, should set proper bash env variables first
# initalize contracts in layer 1, should set proper bash env variables first
npx hardhat --network $layer1 run scripts/initialize_l1_erc20_gateway.ts
npx hardhat --network $layer1 run scripts/initialize_l1_gateway_router.ts
npx hardhat --network $layer1 run scripts/initialize_scroll_chain.ts
@@ -42,7 +42,7 @@ npx hardhat --network $layer1 run scripts/initialize_l1_custom_erc20_gateway.ts
npx hardhat --network $layer1 run scripts/initialize_l1_erc1155_gateway.ts
npx hardhat --network $layer1 run scripts/initialize_l1_erc721_gateway.ts
# initialize contracts in layer 2, should set proper bash env variables first
# initalize contracts in layer 2, should set proper bash env variables first
npx hardhat --network $layer2 run scripts/initialize_l2_erc20_gateway.ts
npx hardhat --network $layer2 run scripts/initialize_l2_gateway_router.ts
npx hardhat --network $layer2 run scripts/initialize_l2_custom_erc20_gateway.ts

View File

@@ -2,7 +2,7 @@
import * as dotenv from "dotenv";
import { ethers } from "hardhat";
import { generateABI, createCode } from "../scripts/poseidon";
import poseidonUnit from "circomlib/src/poseidon_gencontract";
dotenv.config();
@@ -15,7 +15,11 @@ async function main() {
let PoseidonUnit2Address = process.env.POSEIDON_UNIT2_ADDR;
if (!PoseidonUnit2Address) {
const Poseidon2Elements = new ethers.ContractFactory(generateABI(2), createCode(2), deployer);
const Poseidon2Elements = new ethers.ContractFactory(
poseidonUnit.generateABI(2),
poseidonUnit.createCode(2),
deployer
);
const poseidon = await Poseidon2Elements.deploy();
console.log("Deploy PoseidonUnit2 contract, hash:", poseidon.deployTransaction.hash);
@@ -24,9 +28,7 @@ async function main() {
PoseidonUnit2Address = poseidon.address;
}
const verifier = await ScrollChainCommitmentVerifier.deploy(PoseidonUnit2Address, L1ScrollChainAddress, {
gasPrice: 1e9,
});
const verifier = await ScrollChainCommitmentVerifier.deploy(PoseidonUnit2Address, L1ScrollChainAddress);
console.log("Deploy ScrollChainCommitmentVerifier contract, hash:", verifier.deployTransaction.hash);
const receipt = await verifier.deployTransaction.wait();
console.log(`✅ Deploy ScrollChainCommitmentVerifier contract at: ${verifier.address}, gas used: ${receipt.gasUsed}`);

View File

@@ -1,16 +1,11 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
// solhint-disable no-console
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {Fallback} from "../../src/misc/Fallback.sol";
// solhint-disable state-visibility
// solhint-disable var-name-mixedcase
contract DeployFallbackContracts is Script {
uint256 DEPLOYER_PRIVATE_KEY = vm.envUint("DEPLOYER_PRIVATE_KEY");
uint256 NUM_CONTRACTS = vm.envUint("NUM_CONTRACTS");

View File

@@ -1,7 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
// solhint-disable no-console
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
@@ -15,7 +13,7 @@ import {L1ERC1155Gateway} from "../../src/L1/gateways/L1ERC1155Gateway.sol";
import {L1ERC721Gateway} from "../../src/L1/gateways/L1ERC721Gateway.sol";
import {L1ETHGateway} from "../../src/L1/gateways/L1ETHGateway.sol";
import {L1GatewayRouter} from "../../src/L1/gateways/L1GatewayRouter.sol";
import {L1MessageQueueWithGasPriceOracle} from "../../src/L1/rollup/L1MessageQueueWithGasPriceOracle.sol";
import {L1MessageQueue} from "../../src/L1/rollup/L1MessageQueue.sol";
import {L1ScrollMessenger} from "../../src/L1/L1ScrollMessenger.sol";
import {L1StandardERC20Gateway} from "../../src/L1/gateways/L1StandardERC20Gateway.sol";
import {L1WETHGateway} from "../../src/L1/gateways/L1WETHGateway.sol";
@@ -25,10 +23,6 @@ import {ScrollChain} from "../../src/L1/rollup/ScrollChain.sol";
import {Whitelist} from "../../src/L2/predeploys/Whitelist.sol";
import {ZkEvmVerifierV1} from "../../src/libraries/verifier/ZkEvmVerifierV1.sol";
// solhint-disable max-states-count
// solhint-disable state-visibility
// solhint-disable var-name-mixedcase
contract DeployL1BridgeContracts is Script {
uint256 L1_DEPLOYER_PRIVATE_KEY = vm.envUint("L1_DEPLOYER_PRIVATE_KEY");
@@ -39,45 +33,25 @@ contract DeployL1BridgeContracts is Script {
address L1_PLONK_VERIFIER_ADDR = vm.envAddress("L1_PLONK_VERIFIER_ADDR");
address L1_PROXY_ADMIN_ADDR = vm.envAddress("L1_PROXY_ADMIN_ADDR");
address L1_SCROLL_CHAIN_PROXY_ADDR = vm.envAddress("L1_SCROLL_CHAIN_PROXY_ADDR");
address L1_MESSAGE_QUEUE_PROXY_ADDR = vm.envAddress("L1_MESSAGE_QUEUE_PROXY_ADDR");
address L1_SCROLL_MESSENGER_PROXY_ADDR = vm.envAddress("L1_SCROLL_MESSENGER_PROXY_ADDR");
address L2_SCROLL_MESSENGER_PROXY_ADDR = vm.envAddress("L2_SCROLL_MESSENGER_PROXY_ADDR");
address L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR = vm.envAddress("L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR");
address L2_ERC721_GATEWAY_PROXY_ADDR = vm.envAddress("L2_ERC721_GATEWAY_PROXY_ADDR");
address L2_ERC1155_GATEWAY_PROXY_ADDR = vm.envAddress("L2_ERC1155_GATEWAY_PROXY_ADDR");
address L2_ETH_GATEWAY_PROXY_ADDR = vm.envAddress("L2_ETH_GATEWAY_PROXY_ADDR");
address L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR = vm.envAddress("L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR");
address L2_WETH_GATEWAY_PROXY_ADDR = vm.envAddress("L2_WETH_GATEWAY_PROXY_ADDR");
address L2_SCROLL_STANDARD_ERC20_ADDR = vm.envAddress("L2_SCROLL_STANDARD_ERC20_ADDR");
address L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR = vm.envAddress("L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR");
ZkEvmVerifierV1 zkEvmVerifierV1;
MultipleVersionRollupVerifier rollupVerifier;
EnforcedTxGateway enforcedTxGateway;
ProxyAdmin proxyAdmin;
L1GatewayRouter router;
function run() external {
proxyAdmin = ProxyAdmin(L1_PROXY_ADMIN_ADDR);
vm.startBroadcast(L1_DEPLOYER_PRIVATE_KEY);
deployZkEvmVerifierV1();
deployMultipleVersionRollupVerifier();
deployProxyAdmin();
deployL1Whitelist();
deployEnforcedTxGateway();
deployL1MessageQueue();
deployL2GasPriceOracle();
deployScrollChain();
deployL1ScrollMessenger();
deployL1GatewayRouter();
deployL1ETHGateway();
deployL1WETHGateway();
deployL1StandardERC20Gateway();
deployL1GatewayRouter();
deployL1ScrollMessenger();
deployEnforcedTxGateway();
deployL1CustomERC20Gateway();
deployL1ERC721Gateway();
deployL1ERC1155Gateway();
@@ -92,11 +66,17 @@ contract DeployL1BridgeContracts is Script {
}
function deployMultipleVersionRollupVerifier() internal {
rollupVerifier = new MultipleVersionRollupVerifier(address(zkEvmVerifierV1));
MultipleVersionRollupVerifier rollupVerifier = new MultipleVersionRollupVerifier(address(zkEvmVerifierV1));
logAddress("L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR", address(rollupVerifier));
}
function deployProxyAdmin() internal {
proxyAdmin = new ProxyAdmin();
logAddress("L1_PROXY_ADMIN_ADDR", address(proxyAdmin));
}
function deployL1Whitelist() internal {
address owner = vm.addr(L1_DEPLOYER_PRIVATE_KEY);
Whitelist whitelist = new Whitelist(owner);
@@ -105,28 +85,26 @@ contract DeployL1BridgeContracts is Script {
}
function deployScrollChain() internal {
ScrollChain impl = new ScrollChain(CHAIN_ID_L2, L1_MESSAGE_QUEUE_PROXY_ADDR, address(rollupVerifier));
ScrollChain impl = new ScrollChain(CHAIN_ID_L2);
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_SCROLL_CHAIN_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_SCROLL_CHAIN_PROXY_ADDR", address(proxy));
}
function deployL1MessageQueue() internal {
L1MessageQueueWithGasPriceOracle impl = new L1MessageQueueWithGasPriceOracle(
L1_SCROLL_MESSENGER_PROXY_ADDR,
L1_SCROLL_CHAIN_PROXY_ADDR,
address(enforcedTxGateway)
L1MessageQueue impl = new L1MessageQueue();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_MESSAGE_QUEUE_IMPLEMENTATION_ADDR", address(impl));
}
function deployL1ScrollMessenger() internal {
L1ScrollMessenger impl = new L1ScrollMessenger(
L2_SCROLL_MESSENGER_PROXY_ADDR,
L1_SCROLL_CHAIN_PROXY_ADDR,
L1_MESSAGE_QUEUE_PROXY_ADDR
);
logAddress("L1_SCROLL_MESSENGER_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_MESSAGE_QUEUE_PROXY_ADDR", address(proxy));
}
function deployL2GasPriceOracle() internal {
@@ -140,6 +118,42 @@ contract DeployL1BridgeContracts is Script {
logAddress("L2_GAS_PRICE_ORACLE_PROXY_ADDR", address(proxy));
}
function deployL1StandardERC20Gateway() internal {
L1StandardERC20Gateway impl = new L1StandardERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL1ETHGateway() internal {
L1ETHGateway impl = new L1ETHGateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_ETH_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_ETH_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL1WETHGateway() internal {
L1WETHGateway impl = new L1WETHGateway(L1_WETH_ADDR, L2_WETH_ADDR);
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_WETH_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_WETH_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL1GatewayRouter() internal {
L1GatewayRouter impl = new L1GatewayRouter();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
@@ -150,64 +164,18 @@ contract DeployL1BridgeContracts is Script {
logAddress("L1_GATEWAY_ROUTER_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_GATEWAY_ROUTER_PROXY_ADDR", address(proxy));
router = L1GatewayRouter(address(proxy));
}
function deployL1StandardERC20Gateway() internal {
L1StandardERC20Gateway impl = new L1StandardERC20Gateway(
L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR,
address(router),
L1_SCROLL_MESSENGER_PROXY_ADDR,
L2_SCROLL_STANDARD_ERC20_ADDR,
L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR
function deployL1ScrollMessenger() internal {
L1ScrollMessenger impl = new L1ScrollMessenger();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
}
function deployL1ETHGateway() internal {
L1ETHGateway impl = new L1ETHGateway(
L2_ETH_GATEWAY_PROXY_ADDR,
address(router),
L1_SCROLL_MESSENGER_PROXY_ADDR
);
logAddress("L1_ETH_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
}
function deployL1WETHGateway() internal {
L1WETHGateway impl = new L1WETHGateway(
L1_WETH_ADDR,
L2_WETH_ADDR,
L2_WETH_GATEWAY_PROXY_ADDR,
address(router),
L1_SCROLL_MESSENGER_PROXY_ADDR
);
logAddress("L1_WETH_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
}
function deployL1CustomERC20Gateway() internal {
L1CustomERC20Gateway impl = new L1CustomERC20Gateway(
L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR,
address(router),
L1_SCROLL_MESSENGER_PROXY_ADDR
);
logAddress("L1_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
}
function deployL1ERC721Gateway() internal {
L1ERC721Gateway impl = new L1ERC721Gateway(L2_ERC721_GATEWAY_PROXY_ADDR, L1_SCROLL_MESSENGER_PROXY_ADDR);
logAddress("L1_ERC721_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
}
function deployL1ERC1155Gateway() internal {
L1ERC1155Gateway impl = new L1ERC1155Gateway(L2_ERC1155_GATEWAY_PROXY_ADDR, L1_SCROLL_MESSENGER_PROXY_ADDR);
logAddress("L1_ERC1155_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_SCROLL_MESSENGER_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_SCROLL_MESSENGER_PROXY_ADDR", address(proxy));
}
function deployEnforcedTxGateway() internal {
@@ -220,7 +188,42 @@ contract DeployL1BridgeContracts is Script {
logAddress("L1_ENFORCED_TX_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_ENFORCED_TX_GATEWAY_PROXY_ADDR", address(proxy));
enforcedTxGateway = EnforcedTxGateway(address(proxy));
}
function deployL1CustomERC20Gateway() internal {
L1CustomERC20Gateway impl = new L1CustomERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL1ERC721Gateway() internal {
L1ERC721Gateway impl = new L1ERC721Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_ERC721_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_ERC721_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL1ERC1155Gateway() internal {
L1ERC1155Gateway impl = new L1ERC1155Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_ERC1155_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L1_ERC1155_GATEWAY_PROXY_ADDR", address(proxy));
}
function logAddress(string memory name, address addr) internal view {

View File

@@ -1,145 +0,0 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
// solhint-disable no-console
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol";
import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol";
import {EmptyContract} from "../../src/misc/EmptyContract.sol";
// solhint-disable state-visibility
// solhint-disable var-name-mixedcase
contract DeployL1BridgeProxyPlaceholder is Script {
uint256 L1_DEPLOYER_PRIVATE_KEY = vm.envUint("L1_DEPLOYER_PRIVATE_KEY");
ProxyAdmin proxyAdmin;
EmptyContract placeholder;
function run() external {
vm.startBroadcast(L1_DEPLOYER_PRIVATE_KEY);
deployProxyAdmin();
deployPlaceHolder();
deployL1MessageQueue();
deployScrollChain();
deployL1ETHGateway();
deployL1WETHGateway();
deployL1StandardERC20Gateway();
deployL1ScrollMessenger();
deployL1CustomERC20Gateway();
deployL1ERC721Gateway();
deployL1ERC1155Gateway();
vm.stopBroadcast();
}
function deployProxyAdmin() internal {
proxyAdmin = new ProxyAdmin();
logAddress("L1_PROXY_ADMIN_ADDR", address(proxyAdmin));
}
function deployPlaceHolder() internal {
placeholder = new EmptyContract();
logAddress("L1_PROXY_IMPLEMENTATION_PLACEHOLDER_ADDR", address(placeholder));
}
function deployScrollChain() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_SCROLL_CHAIN_PROXY_ADDR", address(proxy));
}
function deployL1MessageQueue() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_MESSAGE_QUEUE_PROXY_ADDR", address(proxy));
}
function deployL1StandardERC20Gateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL1ETHGateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_ETH_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL1WETHGateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_WETH_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL1ScrollMessenger() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_SCROLL_MESSENGER_PROXY_ADDR", address(proxy));
}
function deployL1CustomERC20Gateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL1ERC721Gateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_ERC721_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL1ERC1155Gateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L1_ERC1155_GATEWAY_PROXY_ADDR", address(proxy));
}
function logAddress(string memory name, address addr) internal view {
console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
}
}

View File

@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";

View File

@@ -1,7 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
// solhint-disable no-console
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
@@ -24,15 +22,9 @@ import {Whitelist} from "../../src/L2/predeploys/Whitelist.sol";
import {ScrollStandardERC20} from "../../src/libraries/token/ScrollStandardERC20.sol";
import {ScrollStandardERC20Factory} from "../../src/libraries/token/ScrollStandardERC20Factory.sol";
// solhint-disable max-states-count
// solhint-disable state-visibility
// solhint-disable var-name-mixedcase
contract DeployL2BridgeContracts is Script {
uint256 L2_DEPLOYER_PRIVATE_KEY = vm.envUint("L2_DEPLOYER_PRIVATE_KEY");
address L2_PROXY_ADMIN_ADDR = vm.envAddress("L2_PROXY_ADMIN_ADDR");
address L1_TX_FEE_RECIPIENT_ADDR = vm.envAddress("L1_TX_FEE_RECIPIENT_ADDR");
address L1_WETH_ADDR = vm.envAddress("L1_WETH_ADDR");
address L2_WETH_ADDR = vm.envAddress("L2_WETH_ADDR");
@@ -40,18 +32,6 @@ contract DeployL2BridgeContracts is Script {
L1GasPriceOracle oracle;
L2MessageQueue queue;
ProxyAdmin proxyAdmin;
L2GatewayRouter router;
ScrollStandardERC20Factory factory;
address L2_SCROLL_MESSENGER_PROXY_ADDR = vm.envAddress("L2_SCROLL_MESSENGER_PROXY_ADDR");
address L1_SCROLL_MESSENGER_PROXY_ADDR = vm.envAddress("L1_SCROLL_MESSENGER_PROXY_ADDR");
address L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR = vm.envAddress("L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR");
address L1_ERC721_GATEWAY_PROXY_ADDR = vm.envAddress("L1_ERC721_GATEWAY_PROXY_ADDR");
address L1_ERC1155_GATEWAY_PROXY_ADDR = vm.envAddress("L1_ERC1155_GATEWAY_PROXY_ADDR");
address L1_ETH_GATEWAY_PROXY_ADDR = vm.envAddress("L1_ETH_GATEWAY_PROXY_ADDR");
address L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR = vm.envAddress("L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR");
address L1_WETH_GATEWAY_PROXY_ADDR = vm.envAddress("L1_WETH_GATEWAY_PROXY_ADDR");
// predeploy contracts
address L1_GAS_PRICE_ORACLE_PREDEPLOY_ADDR = vm.envOr("L1_GAS_PRICE_ORACLE_PREDEPLOY_ADDR", address(0));
@@ -60,8 +40,6 @@ contract DeployL2BridgeContracts is Script {
address L2_WHITELIST_PREDEPLOY_ADDR = vm.envOr("L2_WHITELIST_PREDEPLOY_ADDR", address(0));
function run() external {
proxyAdmin = ProxyAdmin(L2_PROXY_ADMIN_ADDR);
vm.startBroadcast(L2_DEPLOYER_PRIVATE_KEY);
// predeploys
@@ -71,12 +49,13 @@ contract DeployL2BridgeContracts is Script {
deployL2Whitelist();
// upgradable
deployProxyAdmin();
deployL2ScrollMessenger();
deployL2GatewayRouter();
deployScrollStandardERC20Factory();
deployL2StandardERC20Gateway();
deployL2ETHGateway();
deployL2WETHGateway();
deployL2StandardERC20Gateway();
deployL2GatewayRouter();
deployScrollStandardERC20Factory();
deployL2CustomERC20Gateway();
deployL2ERC721Gateway();
deployL2ERC1155Gateway();
@@ -134,10 +113,58 @@ contract DeployL2BridgeContracts is Script {
logAddress("L2_WHITELIST_ADDR", address(whitelist));
}
function deployProxyAdmin() internal {
proxyAdmin = new ProxyAdmin();
logAddress("L2_PROXY_ADMIN_ADDR", address(proxyAdmin));
}
function deployL2ScrollMessenger() internal {
L2ScrollMessenger impl = new L2ScrollMessenger(L1_SCROLL_MESSENGER_PROXY_ADDR, address(queue));
L2ScrollMessenger impl = new L2ScrollMessenger(address(queue));
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_SCROLL_MESSENGER_IMPLEMENTATION_ADDR", address(impl));
logAddress("L2_SCROLL_MESSENGER_PROXY_ADDR", address(proxy));
}
function deployL2StandardERC20Gateway() internal {
L2StandardERC20Gateway impl = new L2StandardERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL2ETHGateway() internal {
L2ETHGateway impl = new L2ETHGateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_ETH_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L2_ETH_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL2WETHGateway() internal {
L2WETHGateway impl = new L2WETHGateway(L2_WETH_ADDR, L1_WETH_ADDR);
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_WETH_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L2_WETH_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL2GatewayRouter() internal {
@@ -150,70 +177,50 @@ contract DeployL2BridgeContracts is Script {
logAddress("L2_GATEWAY_ROUTER_IMPLEMENTATION_ADDR", address(impl));
logAddress("L2_GATEWAY_ROUTER_PROXY_ADDR", address(proxy));
router = L2GatewayRouter(address(proxy));
}
function deployScrollStandardERC20Factory() internal {
ScrollStandardERC20 tokenImpl = new ScrollStandardERC20();
factory = new ScrollStandardERC20Factory(address(tokenImpl));
ScrollStandardERC20Factory scrollStandardERC20Factory = new ScrollStandardERC20Factory(address(tokenImpl));
logAddress("L2_SCROLL_STANDARD_ERC20_ADDR", address(tokenImpl));
logAddress("L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR", address(factory));
}
function deployL2StandardERC20Gateway() internal {
L2StandardERC20Gateway impl = new L2StandardERC20Gateway(
L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR,
address(router),
L2_SCROLL_MESSENGER_PROXY_ADDR,
address(factory)
);
logAddress("L2_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
}
function deployL2ETHGateway() internal {
L2ETHGateway impl = new L2ETHGateway(
L1_ETH_GATEWAY_PROXY_ADDR,
address(router),
L2_SCROLL_MESSENGER_PROXY_ADDR
);
logAddress("L2_ETH_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
}
function deployL2WETHGateway() internal {
L2WETHGateway impl = new L2WETHGateway(
L2_WETH_ADDR,
L1_WETH_ADDR,
L1_WETH_GATEWAY_PROXY_ADDR,
address(router),
L2_SCROLL_MESSENGER_PROXY_ADDR
);
logAddress("L2_WETH_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR", address(scrollStandardERC20Factory));
}
function deployL2CustomERC20Gateway() internal {
L2CustomERC20Gateway impl = new L2CustomERC20Gateway(
L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR,
address(router),
L2_SCROLL_MESSENGER_PROXY_ADDR
L2CustomERC20Gateway impl = new L2CustomERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL2ERC721Gateway() internal {
L2ERC721Gateway impl = new L2ERC721Gateway(L1_ERC721_GATEWAY_PROXY_ADDR, L2_SCROLL_MESSENGER_PROXY_ADDR);
L2ERC721Gateway impl = new L2ERC721Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_ERC721_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L2_ERC721_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL2ERC1155Gateway() internal {
L2ERC1155Gateway impl = new L2ERC1155Gateway(L1_ERC1155_GATEWAY_PROXY_ADDR, L2_SCROLL_MESSENGER_PROXY_ADDR);
L2ERC1155Gateway impl = new L2ERC1155Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_ERC1155_GATEWAY_IMPLEMENTATION_ADDR", address(impl));
logAddress("L2_ERC1155_GATEWAY_PROXY_ADDR", address(proxy));
}
function logAddress(string memory name, address addr) internal view {

View File

@@ -1,125 +0,0 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
// solhint-disable no-console
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol";
import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol";
import {EmptyContract} from "../../src/misc/EmptyContract.sol";
// solhint-disable state-visibility
// solhint-disable var-name-mixedcase
contract DeployL2BridgeProxyPlaceholder is Script {
uint256 L2_DEPLOYER_PRIVATE_KEY = vm.envUint("L2_DEPLOYER_PRIVATE_KEY");
ProxyAdmin proxyAdmin;
EmptyContract placeholder;
function run() external {
vm.startBroadcast(L2_DEPLOYER_PRIVATE_KEY);
// upgradable
deployProxyAdmin();
deployPlaceHolder();
deployL2ScrollMessenger();
deployL2ETHGateway();
deployL2WETHGateway();
deployL2StandardERC20Gateway();
deployL2CustomERC20Gateway();
deployL2ERC721Gateway();
deployL2ERC1155Gateway();
vm.stopBroadcast();
}
function deployProxyAdmin() internal {
proxyAdmin = new ProxyAdmin();
logAddress("L2_PROXY_ADMIN_ADDR", address(proxyAdmin));
}
function deployPlaceHolder() internal {
placeholder = new EmptyContract();
logAddress("L2_PROXY_IMPLEMENTATION_PLACEHOLDER_ADDR", address(placeholder));
}
function deployL2ScrollMessenger() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_SCROLL_MESSENGER_PROXY_ADDR", address(proxy));
}
function deployL2StandardERC20Gateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL2ETHGateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_ETH_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL2WETHGateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_WETH_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL2CustomERC20Gateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL2ERC721Gateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_ERC721_GATEWAY_PROXY_ADDR", address(proxy));
}
function deployL2ERC1155Gateway() internal {
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(placeholder),
address(proxyAdmin),
new bytes(0)
);
logAddress("L2_ERC1155_GATEWAY_PROXY_ADDR", address(proxy));
}
function logAddress(string memory name, address addr) internal view {
console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
}
}

View File

@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";

View File

@@ -1,63 +0,0 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {L1LidoGateway} from "../../src/lido/L1LidoGateway.sol";
import {L2LidoGateway} from "../../src/lido/L2LidoGateway.sol";
// solhint-disable state-visibility
// solhint-disable var-name-mixedcase
contract DeployLidoGateway is Script {
string NETWORK = vm.envString("NETWORK");
uint256 L1_DEPLOYER_PRIVATE_KEY = vm.envUint("L1_DEPLOYER_PRIVATE_KEY");
uint256 L2_DEPLOYER_PRIVATE_KEY = vm.envUint("L2_DEPLOYER_PRIVATE_KEY");
address L1_WSTETH_ADDR = vm.envAddress("L1_WSTETH_ADDR");
address L2_WSTETH_ADDR = vm.envAddress("L2_WSTETH_ADDR");
address L1_SCROLL_MESSENGER_PROXY_ADDR = vm.envAddress("L1_SCROLL_MESSENGER_PROXY_ADDR");
address L1_GATEWAY_ROUTER_PROXY_ADDR = vm.envAddress("L1_GATEWAY_ROUTER_PROXY_ADDR");
address L1_LIDO_GATEWAY_PROXY_ADDR = vm.envAddress("L1_LIDO_GATEWAY_PROXY_ADDR");
address L2_SCROLL_MESSENGER_PROXY_ADDR = vm.envAddress("L2_SCROLL_MESSENGER_PROXY_ADDR");
address L2_GATEWAY_ROUTER_PROXY_ADDR = vm.envAddress("L2_GATEWAY_ROUTER_PROXY_ADDR");
address L2_LIDO_GATEWAY_PROXY_ADDR = vm.envAddress("L2_LIDO_GATEWAY_PROXY_ADDR");
function run() external {
vm.startBroadcast(L2_DEPLOYER_PRIVATE_KEY);
if (keccak256(abi.encodePacked(NETWORK)) == keccak256(abi.encodePacked("L1"))) {
// deploy l1 lido gateway
L1LidoGateway gateway = new L1LidoGateway(
L1_WSTETH_ADDR,
L2_WSTETH_ADDR,
L2_LIDO_GATEWAY_PROXY_ADDR,
L1_GATEWAY_ROUTER_PROXY_ADDR,
L1_SCROLL_MESSENGER_PROXY_ADDR
);
logAddress("L1_LIDO_GATEWAY_IMPLEMENTATION_ADDR", address(gateway));
} else if (keccak256(abi.encodePacked(NETWORK)) == keccak256(abi.encodePacked("L2"))) {
// deploy l2 lido gateway
L2LidoGateway gateway = new L2LidoGateway(
L1_WSTETH_ADDR,
L2_WSTETH_ADDR,
L1_LIDO_GATEWAY_PROXY_ADDR,
L2_GATEWAY_ROUTER_PROXY_ADDR,
L2_SCROLL_MESSENGER_PROXY_ADDR
);
logAddress("L2_LIDO_GATEWAY_IMPLEMENTATION_ADDR", address(gateway));
}
vm.stopBroadcast();
}
function logAddress(string memory name, address addr) internal view {
console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
}
}

View File

@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";

View File

@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";

View File

@@ -1,11 +1,8 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol";
import {ITransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol";
import {L1CustomERC20Gateway} from "../../src/L1/gateways/L1CustomERC20Gateway.sol";
import {L1ERC1155Gateway} from "../../src/L1/gateways/L1ERC1155Gateway.sol";
import {L1ERC721Gateway} from "../../src/L1/gateways/L1ERC721Gateway.sol";
@@ -17,14 +14,9 @@ import {L1WETHGateway} from "../../src/L1/gateways/L1WETHGateway.sol";
import {MultipleVersionRollupVerifier} from "../../src/L1/rollup/MultipleVersionRollupVerifier.sol";
import {ScrollChain} from "../../src/L1/rollup/ScrollChain.sol";
import {L1MessageQueue} from "../../src/L1/rollup/L1MessageQueue.sol";
import {L1MessageQueueWithGasPriceOracle} from "../../src/L1/rollup/L1MessageQueueWithGasPriceOracle.sol";
import {L2GasPriceOracle} from "../../src/L1/rollup/L2GasPriceOracle.sol";
import {EnforcedTxGateway} from "../../src/L1/gateways/EnforcedTxGateway.sol";
// solhint-disable max-states-count
// solhint-disable state-visibility
// solhint-disable var-name-mixedcase
contract InitializeL1BridgeContracts is Script {
uint256 L1_DEPLOYER_PRIVATE_KEY = vm.envUint("L1_DEPLOYER_PRIVATE_KEY");
@@ -36,34 +28,23 @@ contract InitializeL1BridgeContracts is Script {
address L1_FEE_VAULT_ADDR = vm.envAddress("L1_FEE_VAULT_ADDR");
address L1_WETH_ADDR = vm.envAddress("L1_WETH_ADDR");
address L1_PROXY_ADMIN_ADDR = vm.envAddress("L1_PROXY_ADMIN_ADDR");
address L1_WHITELIST_ADDR = vm.envAddress("L1_WHITELIST_ADDR");
address L1_SCROLL_CHAIN_PROXY_ADDR = vm.envAddress("L1_SCROLL_CHAIN_PROXY_ADDR");
address L1_SCROLL_CHAIN_IMPLEMENTATION_ADDR = vm.envAddress("L1_SCROLL_CHAIN_IMPLEMENTATION_ADDR");
address L1_MESSAGE_QUEUE_PROXY_ADDR = vm.envAddress("L1_MESSAGE_QUEUE_PROXY_ADDR");
address L1_MESSAGE_QUEUE_IMPLEMENTATION_ADDR = vm.envAddress("L1_MESSAGE_QUEUE_IMPLEMENTATION_ADDR");
address L2_GAS_PRICE_ORACLE_PROXY_ADDR = vm.envAddress("L2_GAS_PRICE_ORACLE_PROXY_ADDR");
address L1_SCROLL_MESSENGER_PROXY_ADDR = vm.envAddress("L1_SCROLL_MESSENGER_PROXY_ADDR");
address L1_SCROLL_MESSENGER_IMPLEMENTATION_ADDR = vm.envAddress("L1_SCROLL_MESSENGER_IMPLEMENTATION_ADDR");
address L1_GATEWAY_ROUTER_PROXY_ADDR = vm.envAddress("L1_GATEWAY_ROUTER_PROXY_ADDR");
address L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR = vm.envAddress("L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR");
address L1_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR = vm.envAddress("L1_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR");
address L1_ERC721_GATEWAY_PROXY_ADDR = vm.envAddress("L1_ERC721_GATEWAY_PROXY_ADDR");
address L1_ERC721_GATEWAY_IMPLEMENTATION_ADDR = vm.envAddress("L1_ERC721_GATEWAY_IMPLEMENTATION_ADDR");
address L1_ERC1155_GATEWAY_PROXY_ADDR = vm.envAddress("L1_ERC1155_GATEWAY_PROXY_ADDR");
address L1_ERC1155_GATEWAY_IMPLEMENTATION_ADDR = vm.envAddress("L1_ERC1155_GATEWAY_IMPLEMENTATION_ADDR");
address L1_ETH_GATEWAY_PROXY_ADDR = vm.envAddress("L1_ETH_GATEWAY_PROXY_ADDR");
address L1_ETH_GATEWAY_IMPLEMENTATION_ADDR = vm.envAddress("L1_ETH_GATEWAY_IMPLEMENTATION_ADDR");
address L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR = vm.envAddress("L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR");
address L1_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR =
vm.envAddress("L1_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR");
address L1_WETH_GATEWAY_PROXY_ADDR = vm.envAddress("L1_WETH_GATEWAY_PROXY_ADDR");
address L1_WETH_GATEWAY_IMPLEMENTATION_ADDR = vm.envAddress("L1_WETH_GATEWAY_IMPLEMENTATION_ADDR");
address L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR = vm.envAddress("L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR");
address L1_ENFORCED_TX_GATEWAY_PROXY_ADDR = vm.envAddress("L1_ENFORCED_TX_GATEWAY_PROXY_ADDR");
address L2_SCROLL_MESSENGER_PROXY_ADDR = vm.envAddress("L2_SCROLL_MESSENGER_PROXY_ADDR");
address L2_GATEWAY_ROUTER_PROXY_ADDR = vm.envAddress("L2_GATEWAY_ROUTER_PROXY_ADDR");
address L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR = vm.envAddress("L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR");
address L2_ERC721_GATEWAY_PROXY_ADDR = vm.envAddress("L2_ERC721_GATEWAY_PROXY_ADDR");
address L2_ERC1155_GATEWAY_PROXY_ADDR = vm.envAddress("L2_ERC1155_GATEWAY_PROXY_ADDR");
@@ -74,25 +55,14 @@ contract InitializeL1BridgeContracts is Script {
address L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR = vm.envAddress("L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR");
function run() external {
ProxyAdmin proxyAdmin = ProxyAdmin(L1_PROXY_ADMIN_ADDR);
vm.startBroadcast(L1_DEPLOYER_PRIVATE_KEY);
// note: we use call upgrade(...) and initialize(...) instead of upgradeAndCall(...),
// otherwise the contract owner would become ProxyAdmin.
// initialize ScrollChain
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L1_SCROLL_CHAIN_PROXY_ADDR),
L1_SCROLL_CHAIN_IMPLEMENTATION_ADDR
);
ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).initialize(
L1_MESSAGE_QUEUE_PROXY_ADDR,
L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR,
MAX_TX_IN_CHUNK
);
ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).addSequencer(L1_COMMIT_SENDER_ADDRESS);
ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).addProver(L1_FINALIZE_SENDER_ADDRESS);
@@ -108,13 +78,8 @@ contract InitializeL1BridgeContracts is Script {
);
L2GasPriceOracle(L2_GAS_PRICE_ORACLE_PROXY_ADDR).updateWhitelist(L1_WHITELIST_ADDR);
// initialize L1MessageQueueWithGasPriceOracle
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L1_MESSAGE_QUEUE_PROXY_ADDR),
L1_MESSAGE_QUEUE_IMPLEMENTATION_ADDR
);
L1MessageQueueWithGasPriceOracle(L1_MESSAGE_QUEUE_PROXY_ADDR).initialize(
// initialize L1MessageQueue
L1MessageQueue(L1_MESSAGE_QUEUE_PROXY_ADDR).initialize(
L1_SCROLL_MESSENGER_PROXY_ADDR,
L1_SCROLL_CHAIN_PROXY_ADDR,
L1_ENFORCED_TX_GATEWAY_PROXY_ADDR,
@@ -122,14 +87,7 @@ contract InitializeL1BridgeContracts is Script {
MAX_L1_MESSAGE_GAS_LIMIT
);
L1MessageQueueWithGasPriceOracle(L1_MESSAGE_QUEUE_PROXY_ADDR).initializeV2();
// initialize L1ScrollMessenger
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L1_SCROLL_MESSENGER_PROXY_ADDR),
L1_SCROLL_MESSENGER_IMPLEMENTATION_ADDR
);
L1ScrollMessenger(payable(L1_SCROLL_MESSENGER_PROXY_ADDR)).initialize(
L2_SCROLL_MESSENGER_PROXY_ADDR,
L1_FEE_VAULT_ADDR,
@@ -150,11 +108,6 @@ contract InitializeL1BridgeContracts is Script {
);
// initialize L1CustomERC20Gateway
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR),
L1_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR
);
L1CustomERC20Gateway(L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR).initialize(
L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR,
L1_GATEWAY_ROUTER_PROXY_ADDR,
@@ -162,30 +115,18 @@ contract InitializeL1BridgeContracts is Script {
);
// initialize L1ERC1155Gateway
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L1_ERC1155_GATEWAY_PROXY_ADDR),
L1_ERC1155_GATEWAY_IMPLEMENTATION_ADDR
);
L1ERC1155Gateway(L1_ERC1155_GATEWAY_PROXY_ADDR).initialize(
L2_ERC1155_GATEWAY_PROXY_ADDR,
L1_SCROLL_MESSENGER_PROXY_ADDR
);
// initialize L1ERC721Gateway
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L1_ERC721_GATEWAY_PROXY_ADDR),
L1_ERC721_GATEWAY_IMPLEMENTATION_ADDR
);
L1ERC721Gateway(L1_ERC721_GATEWAY_PROXY_ADDR).initialize(
L2_ERC721_GATEWAY_PROXY_ADDR,
L1_SCROLL_MESSENGER_PROXY_ADDR
);
// initialize L1ETHGateway
proxyAdmin.upgrade(ITransparentUpgradeableProxy(L1_ETH_GATEWAY_PROXY_ADDR), L1_ETH_GATEWAY_IMPLEMENTATION_ADDR);
L1ETHGateway(L1_ETH_GATEWAY_PROXY_ADDR).initialize(
L2_ETH_GATEWAY_PROXY_ADDR,
L1_GATEWAY_ROUTER_PROXY_ADDR,
@@ -193,11 +134,6 @@ contract InitializeL1BridgeContracts is Script {
);
// initialize L1StandardERC20Gateway
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR),
L1_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR
);
L1StandardERC20Gateway(L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR).initialize(
L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR,
L1_GATEWAY_ROUTER_PROXY_ADDR,
@@ -207,11 +143,6 @@ contract InitializeL1BridgeContracts is Script {
);
// initialize L1WETHGateway
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L1_WETH_GATEWAY_PROXY_ADDR),
L1_WETH_GATEWAY_IMPLEMENTATION_ADDR
);
L1WETHGateway(payable(L1_WETH_GATEWAY_PROXY_ADDR)).initialize(
L2_WETH_GATEWAY_PROXY_ADDR,
L1_GATEWAY_ROUTER_PROXY_ADDR,

View File

@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";

View File

@@ -1,11 +1,8 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol";
import {ITransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol";
import {L2ScrollMessenger} from "../../src/L2/L2ScrollMessenger.sol";
import {L2CustomERC20Gateway} from "../../src/L2/gateways/L2CustomERC20Gateway.sol";
import {L2ERC1155Gateway} from "../../src/L2/gateways/L2ERC1155Gateway.sol";
@@ -20,15 +17,10 @@ import {L1GasPriceOracle} from "../../src/L2/predeploys/L1GasPriceOracle.sol";
import {Whitelist} from "../../src/L2/predeploys/Whitelist.sol";
import {ScrollStandardERC20Factory} from "../../src/libraries/token/ScrollStandardERC20Factory.sol";
// solhint-disable max-states-count
// solhint-disable state-visibility
// solhint-disable var-name-mixedcase
contract InitializeL2BridgeContracts is Script {
uint256 deployerPrivateKey = vm.envUint("L2_DEPLOYER_PRIVATE_KEY");
address L2_WETH_ADDR = vm.envAddress("L2_WETH_ADDR");
address L2_PROXY_ADMIN_ADDR = vm.envAddress("L2_PROXY_ADMIN_ADDR");
address L1_SCROLL_MESSENGER_PROXY_ADDR = vm.envAddress("L1_SCROLL_MESSENGER_PROXY_ADDR");
address L1_GATEWAY_ROUTER_PROXY_ADDR = vm.envAddress("L1_GATEWAY_ROUTER_PROXY_ADDR");
@@ -45,31 +37,18 @@ contract InitializeL2BridgeContracts is Script {
address L2_MESSAGE_QUEUE_ADDR = vm.envAddress("L2_MESSAGE_QUEUE_ADDR");
address L2_SCROLL_MESSENGER_PROXY_ADDR = vm.envAddress("L2_SCROLL_MESSENGER_PROXY_ADDR");
address L2_SCROLL_MESSENGER_IMPLEMENTATION_ADDR = vm.envAddress("L2_SCROLL_MESSENGER_IMPLEMENTATION_ADDR");
address L2_GATEWAY_ROUTER_PROXY_ADDR = vm.envAddress("L2_GATEWAY_ROUTER_PROXY_ADDR");
address L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR = vm.envAddress("L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR");
address L2_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR = vm.envAddress("L2_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR");
address L2_ERC721_GATEWAY_PROXY_ADDR = vm.envAddress("L2_ERC721_GATEWAY_PROXY_ADDR");
address L2_ERC721_GATEWAY_IMPLEMENTATION_ADDR = vm.envAddress("L2_ERC721_GATEWAY_IMPLEMENTATION_ADDR");
address L2_ERC1155_GATEWAY_PROXY_ADDR = vm.envAddress("L2_ERC1155_GATEWAY_PROXY_ADDR");
address L2_ERC1155_GATEWAY_IMPLEMENTATION_ADDR = vm.envAddress("L2_ERC1155_GATEWAY_IMPLEMENTATION_ADDR");
address L2_ETH_GATEWAY_PROXY_ADDR = vm.envAddress("L2_ETH_GATEWAY_PROXY_ADDR");
address L2_ETH_GATEWAY_IMPLEMENTATION_ADDR = vm.envAddress("L2_ETH_GATEWAY_IMPLEMENTATION_ADDR");
address L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR = vm.envAddress("L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR");
address L2_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR =
vm.envAddress("L2_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR");
address L2_WETH_GATEWAY_PROXY_ADDR = vm.envAddress("L2_WETH_GATEWAY_PROXY_ADDR");
address L2_WETH_GATEWAY_IMPLEMENTATION_ADDR = vm.envAddress("L2_WETH_GATEWAY_IMPLEMENTATION_ADDR");
address L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR = vm.envAddress("L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR");
function run() external {
ProxyAdmin proxyAdmin = ProxyAdmin(L2_PROXY_ADMIN_ADDR);
vm.startBroadcast(deployerPrivateKey);
// note: we use call upgrade(...) and initialize(...) instead of upgradeAndCall(...),
// otherwise the contract owner would become ProxyAdmin.
// initialize L2MessageQueue
L2MessageQueue(L2_MESSAGE_QUEUE_ADDR).initialize(L2_SCROLL_MESSENGER_PROXY_ADDR);
@@ -80,11 +59,6 @@ contract InitializeL2BridgeContracts is Script {
L1GasPriceOracle(L1_GAS_PRICE_ORACLE_ADDR).updateWhitelist(L2_WHITELIST_ADDR);
// initialize L2ScrollMessenger
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L2_SCROLL_MESSENGER_PROXY_ADDR),
L2_SCROLL_MESSENGER_IMPLEMENTATION_ADDR
);
L2ScrollMessenger(payable(L2_SCROLL_MESSENGER_PROXY_ADDR)).initialize(L1_SCROLL_MESSENGER_PROXY_ADDR);
// initialize L2GatewayRouter
@@ -94,11 +68,6 @@ contract InitializeL2BridgeContracts is Script {
);
// initialize L2CustomERC20Gateway
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR),
L2_CUSTOM_ERC20_GATEWAY_IMPLEMENTATION_ADDR
);
L2CustomERC20Gateway(L2_CUSTOM_ERC20_GATEWAY_PROXY_ADDR).initialize(
L1_CUSTOM_ERC20_GATEWAY_PROXY_ADDR,
L2_GATEWAY_ROUTER_PROXY_ADDR,
@@ -106,30 +75,18 @@ contract InitializeL2BridgeContracts is Script {
);
// initialize L2ERC1155Gateway
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L2_ERC1155_GATEWAY_PROXY_ADDR),
L2_ERC1155_GATEWAY_IMPLEMENTATION_ADDR
);
L2ERC1155Gateway(L2_ERC1155_GATEWAY_PROXY_ADDR).initialize(
L1_ERC1155_GATEWAY_PROXY_ADDR,
L2_SCROLL_MESSENGER_PROXY_ADDR
);
// initialize L2ERC721Gateway
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L2_ERC721_GATEWAY_PROXY_ADDR),
L2_ERC721_GATEWAY_IMPLEMENTATION_ADDR
);
L2ERC721Gateway(L2_ERC721_GATEWAY_PROXY_ADDR).initialize(
L1_ERC721_GATEWAY_PROXY_ADDR,
L2_SCROLL_MESSENGER_PROXY_ADDR
);
// initialize L2ETHGateway
proxyAdmin.upgrade(ITransparentUpgradeableProxy(L2_ETH_GATEWAY_PROXY_ADDR), L2_ETH_GATEWAY_IMPLEMENTATION_ADDR);
L2ETHGateway(L2_ETH_GATEWAY_PROXY_ADDR).initialize(
L1_ETH_GATEWAY_PROXY_ADDR,
L2_GATEWAY_ROUTER_PROXY_ADDR,
@@ -137,11 +94,6 @@ contract InitializeL2BridgeContracts is Script {
);
// initialize L2StandardERC20Gateway
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR),
L2_STANDARD_ERC20_GATEWAY_IMPLEMENTATION_ADDR
);
L2StandardERC20Gateway(L2_STANDARD_ERC20_GATEWAY_PROXY_ADDR).initialize(
L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR,
L2_GATEWAY_ROUTER_PROXY_ADDR,
@@ -150,11 +102,6 @@ contract InitializeL2BridgeContracts is Script {
);
// initialize L2WETHGateway
proxyAdmin.upgrade(
ITransparentUpgradeableProxy(L2_WETH_GATEWAY_PROXY_ADDR),
L2_WETH_GATEWAY_IMPLEMENTATION_ADDR
);
L2WETHGateway(payable(L2_WETH_GATEWAY_PROXY_ADDR)).initialize(
L1_WETH_GATEWAY_PROXY_ADDR,
L2_GATEWAY_ROUTER_PROXY_ADDR,

View File

@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";

View File

@@ -1,202 +0,0 @@
/* eslint-disable node/no-missing-import */
import { ethers } from "ethers";
import Contract from "circomlib/src/evmasm";
import * as constants from "circomlib/src/poseidon_constants";
const N_ROUNDS_F = 8;
const N_ROUNDS_P = [56, 57, 56, 60, 60, 63, 64, 63];
// Generates raw EVM bytecode (via circomlib's evmasm Contract builder) for an
// on-chain Poseidon hash contract over `nInputs` field elements plus a domain
// separator. The generated contract answers two selectors —
// poseidon(uint256[n],uint256) and poseidon(bytes32[n],bytes32) — and returns
// a single 32-byte hash.
export function createCode(nInputs: number) {
  if (nInputs < 1 || nInputs > 8) throw new Error("Invalid number of inputs. Must be 1<=nInputs<=8");
  // State width t = rate (nInputs) + capacity (1).
  const t = nInputs + 1;
  const nRoundsF = N_ROUNDS_F;
  const nRoundsP = N_ROUNDS_P[t - 2];
  const C = new Contract();

  // Stores the t x t MDS matrix M into memory, one 32-byte slot per entry,
  // starting at offset 32 (slot 0 is reserved for the return label used by mix()).
  function saveM() {
    for (let i = 0; i < t; i++) {
      for (let j = 0; j < t; j++) {
        C.push(constants.M[t - 2][i][j]);
        C.push((1 + i * t + j) * 32);
        C.mstore();
      }
    }
  }

  // AddRoundKey: adds the round constants for round r to every state word mod q.
  // Stack layout on entry (top -> bottom): st[0..t-1], q.
  function ark(r: number) {
    // st, q
    for (let i = 0; i < t; i++) {
      C.dup(t); // q, st, q
      C.push(constants.C[t - 2][r * t + i]); // K, q, st, q
      C.dup(2 + i); // st[i], K, q, st, q
      C.addmod(); // newSt[i], st, q
      C.swap(1 + i); // xx, st, q
      C.pop();
    }
  }

  // S-box: replaces state word p with p^5 mod q (computed as p^4 * p).
  function sigma(p: number) {
    // sq, q
    C.dup(t); // q, st, q
    C.dup(1 + p); // st[p] , q , st, q
    C.dup(1); // q, st[p] , q , st, q
    C.dup(0); // q, q, st[p] , q , st, q
    C.dup(2); // st[p] , q, q, st[p] , q , st, q
    C.dup(0); // st[p] , st[p] , q, q, st[p] , q , st, q
    C.mulmod(); // st2[p], q, st[p] , q , st, q
    C.dup(0); // st2[p], st2[p], q, st[p] , q , st, q
    C.mulmod(); // st4[p], st[p] , q , st, q
    C.mulmod(); // st5[p], st, q
    C.swap(1 + p);
    C.pop(); // newst, q
  }

  // MixLayer: multiplies the state vector by the MDS matrix stored in memory.
  // Emitted once as a shared subroutine: callers store their return label at
  // memory slot 0 and jump to "mix"; mix() jumps back via mload(0).
  function mix() {
    C.label("mix");
    for (let i = 0; i < t; i++) {
      for (let j = 0; j < t; j++) {
        if (j === 0) {
          C.dup(i + t); // q, newSt, oldSt, q
          C.push((1 + i * t + j) * 32);
          C.mload(); // M, q, newSt, oldSt, q
          C.dup(2 + i + j); // oldSt[j], M, q, newSt, oldSt, q
          C.mulmod(); // acc, newSt, oldSt, q
        } else {
          C.dup(1 + i + t); // q, acc, newSt, oldSt, q
          C.push((1 + i * t + j) * 32);
          C.mload(); // M, q, acc, newSt, oldSt, q
          C.dup(3 + i + j); // oldSt[j], M, q, acc, newSt, oldSt, q
          C.mulmod(); // aux, acc, newSt, oldSt, q
          C.dup(2 + i + t); // q, aux, acc, newSt, oldSt, q
          C.swap(2); // acc, aux, q, newSt, oldSt, q
          C.addmod(); // acc, newSt, oldSt, q
        }
      }
    }
    // Drop the old state words, leaving the freshly mixed state (and q).
    for (let i = 0; i < t; i++) {
      C.swap(t - i + (t - i - 1));
      C.pop();
    }
    // Return to the caller: the return label was stored at memory slot 0.
    C.push(0);
    C.mload();
    C.jmp();
  }

  // Check selector: divide calldata word 0 so the 4-byte selector is isolated,
  // then accept either the uint256[] or the bytes32[] flavour of poseidon(...).
  C.push("0x0100000000000000000000000000000000000000000000000000000000");
  C.push(0);
  C.calldataload();
  C.div();
  C.dup(0);
  C.push(ethers.utils.keccak256(ethers.utils.toUtf8Bytes(`poseidon(uint256[${nInputs}],uint256)`)).slice(0, 10)); // poseidon(uint256[n],uint256)
  C.eq();
  C.swap(1);
  C.push(ethers.utils.keccak256(ethers.utils.toUtf8Bytes(`poseidon(bytes32[${nInputs}],bytes32)`)).slice(0, 10)); // poseidon(bytes32[n],bytes32)
  C.eq();
  C.or();
  C.jmpi("start");
  C.invalid();

  C.label("start");
  saveM();
  C.push("0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001"); // q (BN254 scalar field modulus)
  // Load t values from the call data.
  // The function has a single array param plus the domain word:
  // [Selector (4)] [item1 (32)] [item2 (32)] .... [domain (32)]
  // Stack positions 0-nInputs.
  for (let i = 0; i < nInputs; i++) {
    C.push(0x04 + 0x20 * (nInputs - i - 1));
    C.calldataload();
  }
  C.push(0x04 + 0x20 * nInputs);
  C.calldataload();

  // Poseidon round schedule: nRoundsF/2 full rounds, nRoundsP partial rounds,
  // then the remaining nRoundsF/2 full rounds.
  for (let i = 0; i < nRoundsF + nRoundsP; i++) {
    ark(i);
    if (i < nRoundsF / 2 || i >= nRoundsP + nRoundsF / 2) {
      // Full round: apply the S-box to every state word.
      for (let j = 0; j < t; j++) {
        sigma(j);
      }
    } else {
      // Partial round: apply the S-box to the first word only.
      sigma(0);
    }
    // Store the per-round return label at slot 0, then call the shared mix
    // subroutine. ("aferMix" spelling is a pre-existing internal label name.)
    const strLabel = "aferMix" + i;
    C._pushLabel(strLabel);
    C.push(0);
    C.mstore();
    C.jmp("mix");
    C.label(strLabel);
  }

  // Return the first state word as the 32-byte hash.
  C.push("0x00");
  C.mstore(); // Save it to pos 0;
  C.push("0x20");
  C.push("0x00");
  C.return();

  // Emit the mix subroutine body after the main code path.
  mix();

  return C.createTxData();
}
// Builds the two-entry ABI (bytes32 and uint256 flavours) for the generated
// Poseidon contract, parameterised by the input-array length `nInputs`.
export function generateABI(nInputs: number) {
  // Both overloads share the same shape; only the element type differs.
  const makeEntry = (base: string) => ({
    constant: true,
    inputs: [
      {
        internalType: `${base}[${nInputs}]`,
        name: "input",
        type: `${base}[${nInputs}]`,
      },
      {
        internalType: base,
        name: "domain",
        type: base,
      },
    ],
    name: "poseidon",
    outputs: [
      {
        internalType: base,
        name: "",
        type: base,
      },
    ],
    payable: false,
    stateMutability: "pure",
    type: "function",
  });

  return [makeEntry("bytes32"), makeEntry("uint256")];
}

View File

@@ -27,16 +27,6 @@ import {IMessageDropCallback} from "../libraries/callbacks/IMessageDropCallback.
/// @dev All deposited Ether (including `WETH` deposited through `L1WETHGateway`) will be locked in
/// this contract.
contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
/*************
* Constants *
*************/
/// @notice The address of Rollup contract.
address public immutable rollup;
/// @notice The address of L1MessageQueue contract.
address public immutable messageQueue;
/***********
* Structs *
***********/
@@ -61,11 +51,11 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
/// @notice Mapping from L1 message hash to drop status.
mapping(bytes32 => bool) public isL1MessageDropped;
/// @dev The storage slot used as Rollup contract, which is deprecated now.
address private __rollup;
/// @notice The address of Rollup contract.
address public rollup;
/// @dev The storage slot used as L1MessageQueue contract, which is deprecated now.
address private __messageQueue;
/// @notice The address of L1MessageQueue contract.
address public messageQueue;
/// @notice The maximum number of times each L1 message can be replayed.
uint256 public maxReplayTimes;
@@ -90,25 +80,11 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
* Constructor *
***************/
constructor(
address _counterpart,
address _rollup,
address _messageQueue
) ScrollMessengerBase(_counterpart) {
if (_rollup == address(0) || _messageQueue == address(0)) {
revert ErrorZeroAddress();
}
constructor() {
_disableInitializers();
rollup = _rollup;
messageQueue = _messageQueue;
}
/// @notice Initialize the storage of L1ScrollMessenger.
///
/// @dev The parameters `_counterpart`, `_rollup` and `_messageQueue` are no longer used.
///
/// @param _counterpart The address of L2ScrollMessenger contract in L2.
/// @param _feeVault The address of fee vault, which will be used to collect relayer fee.
/// @param _rollup The address of ScrollChain contract.
@@ -119,10 +95,13 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
address _rollup,
address _messageQueue
) public initializer {
if (_counterpart == address(0) || _rollup == address(0) || _messageQueue == address(0)) {
revert ErrZeroAddress();
}
ScrollMessengerBase.__ScrollMessengerBase_init(_counterpart, _feeVault);
__rollup = _rollup;
__messageQueue = _messageQueue;
rollup = _rollup;
messageQueue = _messageQueue;
maxReplayTimes = 3;
emit UpdateMaxReplayTimes(0, 3);
@@ -166,8 +145,9 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
require(!isL2MessageExecuted[_xDomainCalldataHash], "Message was already successfully executed");
{
require(IScrollChain(rollup).isBatchFinalized(_proof.batchIndex), "Batch is not finalized");
bytes32 _messageRoot = IScrollChain(rollup).withdrawRoots(_proof.batchIndex);
address _rollup = rollup;
require(IScrollChain(_rollup).isBatchFinalized(_proof.batchIndex), "Batch is not finalized");
bytes32 _messageRoot = IScrollChain(_rollup).withdrawRoots(_proof.batchIndex);
require(
WithdrawTrieVerifier.verifyMerkleProof(_messageRoot, _xDomainCalldataHash, _nonce, _proof.merkleProof),
"Invalid proof"
@@ -208,6 +188,8 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
// is encoded in the `_message`. We will check the `xDomainCalldata` on layer 2 to avoid duplicated execution.
// So, only one message will succeed on layer 2. If one of the message is executed successfully, the other one
// will revert with "Message was already successfully executed".
address _messageQueue = messageQueue;
address _counterpart = counterpart;
bytes memory _xDomainCalldata = _encodeXDomainCalldata(_from, _to, _value, _messageNonce, _message);
bytes32 _xDomainCalldataHash = keccak256(_xDomainCalldata);
@@ -216,7 +198,7 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
require(!isL1MessageDropped[_xDomainCalldataHash], "Message already dropped");
// compute and deduct the messaging fee to fee vault.
uint256 _fee = IL1MessageQueue(messageQueue).estimateCrossDomainMessageFee(_newGasLimit);
uint256 _fee = IL1MessageQueue(_messageQueue).estimateCrossDomainMessageFee(_newGasLimit);
// charge relayer fee
require(msg.value >= _fee, "Insufficient msg.value for fee");
@@ -226,8 +208,8 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
}
// enqueue the new transaction
uint256 _nextQueueIndex = IL1MessageQueue(messageQueue).nextCrossDomainMessageIndex();
IL1MessageQueue(messageQueue).appendCrossDomainMessage(counterpart, _newGasLimit, _xDomainCalldata);
uint256 _nextQueueIndex = IL1MessageQueue(_messageQueue).nextCrossDomainMessageIndex();
IL1MessageQueue(_messageQueue).appendCrossDomainMessage(_counterpart, _newGasLimit, _xDomainCalldata);
ReplayState memory _replayState = replayStates[_xDomainCalldataHash];
// update the replayed message chain.
@@ -278,6 +260,8 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
//
// We limit the number of `replayMessage` calls of each message, which may solve the above problem.
address _messageQueue = messageQueue;
// check message exists
bytes memory _xDomainCalldata = _encodeXDomainCalldata(_from, _to, _value, _messageNonce, _message);
bytes32 _xDomainCalldataHash = keccak256(_xDomainCalldata);
@@ -293,7 +277,7 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
// check message is skipped and drop it.
// @note If the list is very long, the message may never be dropped.
while (true) {
IL1MessageQueue(messageQueue).dropCrossDomainMessage(_lastIndex);
IL1MessageQueue(_messageQueue).dropCrossDomainMessage(_lastIndex);
_lastIndex = prevReplayIndex[_lastIndex];
if (_lastIndex == 0) break;
unchecked {
@@ -335,12 +319,15 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
uint256 _gasLimit,
address _refundAddress
) internal nonReentrant {
address _messageQueue = messageQueue; // gas saving
address _counterpart = counterpart; // gas saving
// compute the actual cross domain message calldata.
uint256 _messageNonce = IL1MessageQueue(messageQueue).nextCrossDomainMessageIndex();
uint256 _messageNonce = IL1MessageQueue(_messageQueue).nextCrossDomainMessageIndex();
bytes memory _xDomainCalldata = _encodeXDomainCalldata(_msgSender(), _to, _value, _messageNonce, _message);
// compute and deduct the messaging fee to fee vault.
uint256 _fee = IL1MessageQueue(messageQueue).estimateCrossDomainMessageFee(_gasLimit);
uint256 _fee = IL1MessageQueue(_messageQueue).estimateCrossDomainMessageFee(_gasLimit);
require(msg.value >= _fee + _value, "Insufficient msg.value");
if (_fee > 0) {
(bool _success, ) = feeVault.call{value: _fee}("");
@@ -348,7 +335,7 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
}
// append message to L1MessageQueue
IL1MessageQueue(messageQueue).appendCrossDomainMessage(counterpart, _gasLimit, _xDomainCalldata);
IL1MessageQueue(_messageQueue).appendCrossDomainMessage(_counterpart, _gasLimit, _xDomainCalldata);
// record the message hash for future use.
bytes32 _xDomainCalldataHash = keccak256(_xDomainCalldata);

134
contracts/src/L1/ecc.sol Normal file
View File

@@ -0,0 +1,134 @@
// SPDX-License-Identifier: GPL-3.0
pragma solidity >=0.7.0 <0.9.0;
/// @title Ecc
/// @notice Thin wrappers around the BN254 elliptic-curve precompiles
///         (ecAdd at 0x06, ecMul at 0x07, pairing at 0x08) plus `ecrecover`,
///         together with simple loop-based benchmark entry points.
contract Ecc {
    /* ECC Functions */
    // https://etherscan.io/address/0x41bf00f080ed41fa86201eac56b8afb170d9e36d#code
    /// @notice Adds two BN254 G1 points via the ecAdd precompile (0x06).
    /// @param p0 First point as [x, y].
    /// @param p1 Second point as [x, y].
    /// @return retP The sum p0 + p1 as [x, y]; reverts if the precompile fails.
    function ecAdd(uint256[2] memory p0, uint256[2] memory p1) public view
        returns (uint256[2] memory retP)
    {
        uint256[4] memory i = [p0[0], p0[1], p1[0], p1[1]];
        assembly {
            // call ecadd precompile
            // inputs are: x1, y1, x2, y2
            // not(0) forwards effectively all gas to the precompile.
            if iszero(staticcall(not(0), 0x06, i, 0x80, retP, 0x40)) {
                revert(0, 0)
            }
        }
    }
    // https://etherscan.io/address/0x41bf00f080ed41fa86201eac56b8afb170d9e36d#code
    /// @notice Multiplies a BN254 G1 point by a scalar via the ecMul precompile (0x07).
    /// @param p Point as [x, y].
    /// @param s Scalar multiplier.
    /// @return retP The product s * p as [x, y]; reverts if the precompile fails.
    function ecMul(uint256[2] memory p, uint256 s) public view
        returns (uint256[2] memory retP)
    {
        // With a public key (x, y), this computes p = scalar * (x, y).
        uint256[3] memory i = [p[0], p[1], s];
        assembly {
            // call ecmul precompile
            // inputs are: x, y, scalar
            if iszero(staticcall(not(0), 0x07, i, 0x60, retP, 0x40)) {
                revert(0, 0)
            }
        }
    }
    // scroll-tech/scroll/contracts/src/libraries/verifier/RollupVerifier.sol
    // BN254 G1 point in affine coordinates.
    struct G1Point {
        uint256 x;
        uint256 y;
    }
    // BN254 G2 point; each coordinate is an Fp2 element encoded as two words.
    struct G2Point {
        uint256[2] x;
        uint256[2] y;
    }
    /// @notice Checks the pairing product over matched (G1, G2) pairs via the
    ///         pairing precompile (0x08).
    /// @param p1 G1 points; must have the same length as `p2`.
    /// @param p2 G2 points.
    /// @return True when the pairing product equals one (precompile output != 0).
    function ecPairing(G1Point[] memory p1, G2Point[] memory p2) internal view returns (bool) {
        // Each pair occupies 6 words: x, y, x[0], x[1], y[0], y[1].
        uint256 length = p1.length * 6;
        uint256[] memory input = new uint256[](length);
        uint256[1] memory result;
        bool ret;
        require(p1.length == p2.length);
        for (uint256 i = 0; i < p1.length; i++) {
            input[0 + i * 6] = p1[i].x;
            input[1 + i * 6] = p1[i].y;
            input[2 + i * 6] = p2[i].x[0];
            input[3 + i * 6] = p2[i].x[1];
            input[4 + i * 6] = p2[i].y[0];
            input[5 + i * 6] = p2[i].y[1];
        }
        assembly {
            // add(input, 0x20) skips the dynamic-array length slot.
            ret := staticcall(gas(), 8, add(input, 0x20), mul(length, 0x20), result, 0x20)
        }
        require(ret);
        return result[0] != 0;
    }
    /* Bench */
    /// @notice Benchmark: performs `n` ecAdd calls on the generator point; results discarded.
    function ecAdds(uint256 n) public
    {
        uint256[2] memory p0;
        p0[0] = 1;
        p0[1] = 2;
        uint256[2] memory p1;
        p1[0] = 1;
        p1[1] = 2;
        for (uint i = 0; i < n; i++) {
            ecAdd(p0, p1);
        }
    }
    /// @notice Benchmark: performs `n` ecMul calls on the generator point; results discarded.
    function ecMuls(uint256 n) public
    {
        uint256[2] memory p0;
        p0[0] = 1;
        p0[1] = 2;
        for (uint i = 0; i < n; i++) {
            ecMul(p0, 3);
        }
    }
    /// @notice Benchmark: performs `n` pairing checks over a fixed two-pair input;
    ///         results discarded (the assertion variant is left commented out).
    function ecPairings(uint256 n) public
    {
        G1Point[] memory g1_points = new G1Point[](2);
        G2Point[] memory g2_points = new G2Point[](2);
        g1_points[0].x = 0x0000000000000000000000000000000000000000000000000000000000000001;
        g1_points[0].y = 0x0000000000000000000000000000000000000000000000000000000000000002;
        g2_points[0].x[1] = 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed;
        g2_points[0].x[0] = 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2;
        g2_points[0].y[1] = 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa;
        g2_points[0].y[0] = 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b;
        g1_points[1].x = 0x1aa125a22bd902874034e67868aed40267e5575d5919677987e3bc6dd42a32fe;
        g1_points[1].y = 0x1bacc186725464068956d9a191455c2d6f6db282d83645c610510d8d4efbaee0;
        g2_points[1].x[1] = 0x1b7734c80605f71f1e2de61e998ce5854ff2abebb76537c3d67e50d71422a852;
        g2_points[1].x[0] = 0x10d5a1e34b2388a5ebe266033a5e0e63c89084203784da0c6bd9b052a78a2cac;
        g2_points[1].y[1] = 0x275739c5c2cdbc72e37c689e2ab441ea76c1d284b9c46ae8f5c42ead937819e1;
        g2_points[1].y[0] = 0x018de34c5b7c3d3d75428bbe050f1449ea3d9961d563291f307a1874f7332e65;
        for (uint i = 0; i < n; i++) {
            ecPairing(g1_points, g2_points);
            // bool checked = false;
            // checked = ecPairing(g1_points, g2_points);
            // require(checked);
        }
    }
    // https://github.com/OpenZeppelin/openzeppelin-contracts/blob/8a0b7bed82d6b8053872c3fd40703efd58f5699d/test/utils/cryptography/ECDSA.test.js#L230
    /// @notice Benchmark: performs `n` ecrecover calls on a fixed signature; results discarded.
    function ecRecovers(uint256 n) public
    {
        bytes32 hash = 0xb94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9;
        bytes32 r = 0xe742ff452d41413616a5bf43fe15dd88294e983d3d36206c2712f39083d638bd;
        uint8 v = 0x1b;
        bytes32 s = 0xe0a0fc89be718fbc1033e1d30d78be1c68081562ed2e97af876f286f3453231d;
        for (uint i = 0; i < n; i++) {
            ecrecover(hash, v, r, s);
        }
    }
}

View File

@@ -66,5 +66,5 @@ interface IL1GatewayRouter is IL1ETHGateway, IL1ERC20Gateway {
/// @dev This function should only be called by contract owner.
/// @param _tokens The list of addresses of tokens to update.
/// @param _gateways The list of addresses of gateways to update.
function setERC20Gateway(address[] calldata _tokens, address[] calldata _gateways) external;
function setERC20Gateway(address[] memory _tokens, address[] memory _gateways) external;
}

View File

@@ -41,32 +41,21 @@ contract L1CustomERC20Gateway is L1ERC20Gateway {
* Constructor *
***************/
/// @notice Constructor for `L1CustomERC20Gateway` implementation contract.
///
/// @param _counterpart The address of `L2USDCGateway` contract in L2.
/// @param _router The address of `L1GatewayRouter` contract in L1.
/// @param _messenger The address of `L1ScrollMessenger` contract L1.
constructor(
address _counterpart,
address _router,
address _messenger
) ScrollGatewayBase(_counterpart, _router, _messenger) {
if (_router == address(0)) revert ErrorZeroAddress();
constructor() {
_disableInitializers();
}
/// @notice Initialize the storage of L1CustomERC20Gateway.
///
/// @dev The parameters `_counterpart`, `_router` and `_messenger` are no longer used.
///
/// @param _counterpart The address of L2CustomERC20Gateway in L2.
/// @param _router The address of L1GatewayRouter in L1.
/// @param _messenger The address of L1ScrollMessenger in L1.
/// @param _router The address of L1GatewayRouter.
/// @param _messenger The address of L1ScrollMessenger.
function initialize(
address _counterpart,
address _router,
address _messenger
) external initializer {
require(_router != address(0), "zero router address");
ScrollGatewayBase._initialize(_counterpart, _router, _messenger);
}

View File

@@ -0,0 +1,11 @@
// SPDX-License-Identifier: MIT
pragma solidity =0.8.16;
import {L1CustomERC20Gateway} from "./L1CustomERC20Gateway.sol";
// solhint-disable no-empty-blocks
contract L1DAIGateway is L1CustomERC20Gateway {
}

View File

@@ -41,17 +41,13 @@ contract L1ERC1155Gateway is ERC1155HolderUpgradeable, ScrollGatewayBase, IL1ERC
* Constructor *
***************/
/// @notice Constructor for `L1ERC1155Gateway` implementation contract.
///
/// @param _counterpart The address of `L1ERC1155Gateway` contract in L2.
/// @param _messenger The address of `L1ScrollMessenger` contract in L1.
constructor(address _counterpart, address _messenger) ScrollGatewayBase(_counterpart, address(0), _messenger) {
constructor() {
_disableInitializers();
}
/// @notice Initialize the storage of L1ERC1155Gateway.
/// @param _counterpart The address of L2ERC1155Gateway in L2.
/// @param _messenger The address of L1ScrollMessenger in L1.
/// @param _messenger The address of L1ScrollMessenger.
function initialize(address _counterpart, address _messenger) external initializer {
ERC1155HolderUpgradeable.__ERC1155Holder_init();
ERC1155ReceiverUpgradeable.__ERC1155Receiver_init();

View File

@@ -41,20 +41,13 @@ contract L1ERC721Gateway is ERC721HolderUpgradeable, ScrollGatewayBase, IL1ERC72
* Constructor *
***************/
/// @notice Constructor for `L2ERC721Gateway` implementation contract.
///
/// @param _counterpart The address of `L2ERC721Gateway` contract in L2.
/// @param _messenger The address of `L1ScrollMessenger` contract in L1.
constructor(address _counterpart, address _messenger) ScrollGatewayBase(_counterpart, address(0), _messenger) {
/// @notice Constructor for the implementation contract.
/// @dev `_disableInitializers` locks the implementation so `initialize` is only
/// callable through a proxy, never on the implementation directly.
constructor() {
    _disableInitializers();
}
/// @notice Initialize the storage of L1ERC721Gateway.
///
/// @dev The parameters `_counterpart` and `_messenger` are no longer used.
///
/// @param _counterpart The address of L2ERC721Gateway in L2.
/// @param _messenger The address of L1ScrollMessenger in L1.
/// @param _messenger The address of L1ScrollMessenger.
function initialize(address _counterpart, address _messenger) external initializer {
ERC721HolderUpgradeable.__ERC721Holder_init();

View File

@@ -21,33 +21,20 @@ contract L1ETHGateway is ScrollGatewayBase, IL1ETHGateway, IMessageDropCallback
* Constructor *
***************/
/// @notice Constructor for `L1ETHGateway` implementation contract.
///
/// @param _counterpart The address of `L2ETHGateway` contract in L2.
/// @param _router The address of `L1GatewayRouter` contract in L1.
/// @param _messenger The address of `L1ScrollMessenger` contract in L1.
constructor(
address _counterpart,
address _router,
address _messenger
) ScrollGatewayBase(_counterpart, _router, _messenger) {
if (_router == address(0)) revert ErrorZeroAddress();
/// @notice Constructor for the implementation contract.
/// @dev `_disableInitializers` locks the implementation so `initialize` is only
/// callable through a proxy, never on the implementation directly.
constructor() {
    _disableInitializers();
}
/// @notice Initialize the storage of L1ETHGateway.
///
/// @dev The parameters `_counterpart`, `_router` and `_messenger` are no longer used.
///
/// @param _counterpart The address of L2ETHGateway in L2.
/// @param _router The address of L1GatewayRouter in L1.
/// @param _messenger The address of L1ScrollMessenger in L1.
/// @param _router The address of L1GatewayRouter.
/// @param _messenger The address of L1ScrollMessenger.
/// @notice One-time proxy-side initialization of the gateway storage.
/// @param _counterpart The address of the counterpart gateway in L2.
/// @param _router The address of the L1 gateway router.
/// @param _messenger The address of the L1 Scroll messenger.
function initialize(
    address _counterpart,
    address _router,
    address _messenger
) external initializer {
    // Reject a missing router up front; message matches the original
    // `require` so callers observe an identical Error(string) revert.
    if (_router == address(0)) {
        revert("zero router address");
    }

    ScrollGatewayBase._initialize(_counterpart, _router, _messenger);
}
@@ -133,7 +120,6 @@ contract L1ETHGateway is ScrollGatewayBase, IL1ETHGateway, IMessageDropCallback
// 1. Extract real sender if this call is from L1GatewayRouter.
address _from = _msgSender();
if (router == _from) {
(_from, _data) = abi.decode(_data, (address, bytes));
}

View File

@@ -19,25 +19,15 @@ import {L1ERC20Gateway} from "./L1ERC20Gateway.sol";
/// token will be transfer to the recipient directly. Any ERC20 that requires non-standard functionality
/// should use a separate gateway.
contract L1StandardERC20Gateway is L1ERC20Gateway {
/*************
* Constants *
*************/
/// @notice The address of ScrollStandardERC20 implementation in L2.
address public immutable l2TokenImplementation;
/// @notice The address of ScrollStandardERC20Factory contract in L2.
address public immutable l2TokenFactory;
/*************
* Variables *
*************/
/// @dev The storage slot used as ScrollStandardERC20 implementation in L2, which is deprecated now.
address private __l2TokenImplementation;
/// @notice The address of ScrollStandardERC20 implementation in L2.
address public l2TokenImplementation;
/// @dev The storage slot used as ScrollStandardERC20Factory contract in L2, which is deprecated now.
address private __l2TokenFactory;
/// @notice The address of ScrollStandardERC20Factory contract in L2.
address public l2TokenFactory;
/// @notice Mapping from l1 token address to l2 token address.
/// @dev This is not necessary, since we can compute the address directly. But, we use this mapping
@@ -49,48 +39,33 @@ contract L1StandardERC20Gateway is L1ERC20Gateway {
* Constructor *
***************/
/// @notice Constructor for `L1StandardERC20Gateway` implementation contract.
///
/// @param _counterpart The address of `L2StandardERC20Gateway` contract in L2.
/// @param _router The address of `L1GatewayRouter` contract in L1.
/// @param _messenger The address of `L1ScrollMessenger` contract in L1.
/// @param _l2TokenImplementation The address of `ScrollStandardERC20` implementation in L2.
/// @param _l2TokenFactory The address of `ScrollStandardERC20Factory` contract in L2.
constructor(
/// @notice Constructor for the implementation contract.
/// @dev `_disableInitializers` locks the implementation so `initialize` is only
/// callable through a proxy, never on the implementation directly.
constructor() {
    _disableInitializers();
}
/// @notice Initialize the storage of L1StandardERC20Gateway.
/// @param _counterpart The address of L2StandardERC20Gateway in L2.
/// @param _router The address of L1GatewayRouter.
/// @param _messenger The address of L1ScrollMessenger.
/// @param _l2TokenImplementation The address of ScrollStandardERC20 implementation in L2.
/// @param _l2TokenFactory The address of ScrollStandardERC20Factory contract in L2.
function initialize(
address _counterpart,
address _router,
address _messenger,
address _l2TokenImplementation,
address _l2TokenFactory
) ScrollGatewayBase(_counterpart, _router, _messenger) {
if (_router == address(0) || _l2TokenImplementation == address(0) || _l2TokenFactory == address(0)) {
revert ErrorZeroAddress();
}
) external initializer {
require(_router != address(0), "zero router address");
ScrollGatewayBase._initialize(_counterpart, _router, _messenger);
_disableInitializers();
require(_l2TokenImplementation != address(0), "zero implementation hash");
require(_l2TokenFactory != address(0), "zero factory address");
l2TokenImplementation = _l2TokenImplementation;
l2TokenFactory = _l2TokenFactory;
}
/// @notice Initialize the storage of L1StandardERC20Gateway.
///
/// @dev The parameters `_counterpart`, `_router`, `_messenger`, `_l2TokenImplementation` and
/// `_l2TokenFactory` are no longer used.
///
/// @param _counterpart The address of L2StandardERC20Gateway in L2.
/// @param _router The address of L1GatewayRouter in L1.
/// @param _messenger The address of L1ScrollMessenger in L1.
/// @notice Initialize the storage of L1StandardERC20Gateway.
/// @dev The last two parameters are unnamed on purpose: they are kept only so the
/// signature stays compatible with existing deployment callers, and their values
/// are ignored (the corresponding storage slots are deprecated).
/// @param _counterpart The address of L2StandardERC20Gateway in L2.
/// @param _router The address of L1GatewayRouter in L1.
/// @param _messenger The address of L1ScrollMessenger in L1.
function initialize(
    address _counterpart,
    address _router,
    address _messenger,
    address, /*_l2TokenImplementation*/
    address /*_l2TokenFactory*/
) external initializer {
    ScrollGatewayBase._initialize(_counterpart, _router, _messenger);
}
/*************************
* Public View Functions *
*************************/

Some files were not shown because too many files have changed in this diff Show More