Compare commits

...

72 Commits

Author SHA1 Message Date
georgehao
64d6e5fc5c feat: update 2024-04-21 10:19:54 +08:00
georgehao
ce2777943a feat: update 2024-04-21 10:06:36 +08:00
georgehao
74f839e7f3 feat: update 2024-04-21 10:03:06 +08:00
georgehao
005c425a6b feat: update 2024-04-21 09:56:34 +08:00
georgehao
4d3117fea6 feat: update 2024-04-21 09:52:43 +08:00
georgehao
0b4c48061e feat: update 2024-04-21 09:43:32 +08:00
georgehao
1357a1c947 feat: update 2024-04-21 09:02:53 +08:00
georgehao
9dfb082cf4 feat: update 2024-04-20 23:57:43 +08:00
georgehao
dd9892ed25 feat: update 2024-04-20 23:40:28 +08:00
georgehao
ad185c328d feat: update 2024-04-20 23:37:17 +08:00
georgehao
557734ea99 feat: update 2024-04-20 23:34:00 +08:00
georgehao
026030746b feat: update 2024-04-20 23:30:15 +08:00
georgehao
fbcafc260c feat: update 2024-04-20 23:19:20 +08:00
georgehao
3740eb2b3e feat: update 2024-04-20 23:14:09 +08:00
georgehao
86b852c7ae feat: update 2024-04-20 23:07:58 +08:00
georgehao
4aa7e2d7b0 feat: update 2024-04-20 22:58:17 +08:00
georgehao
8fe045b810 feat: update 2024-04-20 22:54:02 +08:00
georgehao
2ec0e1d1b8 feat: update 2024-04-20 22:49:14 +08:00
georgehao
d49ce4ca4f feat: update 2024-04-20 22:34:52 +08:00
georgehao
23c3b208a3 feat: update 2024-04-20 22:31:32 +08:00
georgehao
7a5e704c69 feat: update 2024-04-20 22:25:06 +08:00
georgehao
a498f55d1e feat: update 2024-04-20 22:20:45 +08:00
georgehao
8d3221900d feat: test 2024-04-20 22:16:51 +08:00
georgehao
59ba4b90d7 feat: update 2024-04-20 22:13:23 +08:00
georgehao
609a4b92e5 feat: update 2024-04-20 22:09:35 +08:00
georgehao
fa813dbfc4 feat: update 2024-04-20 21:54:52 +08:00
georgehao
100124985a feat: update 2024-04-20 21:50:38 +08:00
georgehao
8fc60d1d3a feat: update 2024-04-20 19:58:56 +08:00
georgehao
3479f1d7d2 feat: upddate 2024-04-20 19:05:44 +08:00
georgehao
06af04827c feat: update 2024-04-20 00:35:28 +08:00
georgehao
ec5b3c3a18 feat: update 2024-04-20 00:18:03 +08:00
georgehao
f535dd977f feat: update 2024-04-20 00:14:11 +08:00
georgehao
f0e34b8141 feat: update 2024-04-19 23:57:34 +08:00
Mengran Lan
53cf26597d perf(db): add indexes for table prover_task, columns respectively: task_id, created_at (#1283) 2024-04-19 16:54:13 +08:00
sbaizet
f0f7341271 ci - Docker support for arm64 on all services (#1278) 2024-04-16 10:06:29 +02:00
sbaizet
b6af88c936 ci - Docker support for arm64 (#1276) 2024-04-15 16:18:25 +02:00
sbaizet
de541a650a ci - fix github action to support arm64 platform (#1275) 2024-04-15 15:35:18 +02:00
colin
d7a57235d3 fix(rollup-relayer): tweak logs (#1274)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-04-15 14:07:26 +08:00
sbaizet
91d21301ec ci: add support for arm64 on event-watcher images (#1269) 2024-04-14 16:27:59 +08:00
Daniel Helm
4b32a44a70 docs(coordinator): fix internal links to config files in README (#1272) 2024-04-14 16:27:17 +08:00
georgehao
55b400c5fb fix: rollup make lint failure (#1273) 2024-04-14 16:26:35 +08:00
JayLiu
1b49091207 test: fix testcontainers listen ports (#1270)
Co-authored-by: liuyuecai <liuyuecai1995@gmail.com>
2024-04-13 13:30:22 +08:00
HAOYUatHZ
5b827c3c18 feat(db): add batch_data_hash & blob metadata (#1221)
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2024-04-12 15:45:06 +08:00
georgehao
6b2eb80aa5 feat: print version info on service startup (#1268)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2024-04-12 15:09:26 +08:00
Péter Garamvölgyi
71f88b04f5 fix: add blobHash to challenge and piHash (#1264)
Co-authored-by: Thegaram <Thegaram@users.noreply.github.com>
2024-04-12 11:00:37 +08:00
Zhang Zhuo
bcd9764bcd chore(libzkp): upgrade to v0.10.3 (#1267)
Co-authored-by: Thegaram <Thegaram@users.noreply.github.com>
2024-04-12 10:51:39 +08:00
georgehao
b4f8377a08 fix(coordinator): fix coordinator recover public key inconsistent (#1265)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-04-12 07:19:31 +08:00
Zhang Zhuo
b52d43caa8 chore(libzkp): upgrade to v0.10.2 (#1257)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: Thegaram <Thegaram@users.noreply.github.com>
2024-04-11 13:44:09 +08:00
Mengran Lan
201bf401cd feat(coordinator): add new metric get_task_counter tracking prover info (#1235)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-04-11 12:47:03 +08:00
georgehao
898ac1d25c feat: update batch/chunk proving status when finalize without proof (#1255)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2024-04-11 11:15:09 +08:00
Snoppy
1336b89fb8 chore: fix typos (#1244)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2024-04-11 11:02:32 +08:00
Zhang Zhuo
73045df037 feat(coordinator): support multiple batch verifier versions (#1249)
Co-authored-by: georgehao <haohongfan@gmail.com>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2024-04-11 10:55:58 +08:00
Sina Pilehchiha
b3093e9eb6 fix: Implemented fixes for 4844 support fix review. (#1256) 2024-04-10 06:05:29 -04:00
colin
3d5250e52d feat(bridge-history): add puffer gateways (#1252)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-04-09 19:34:03 +08:00
HAOYUatHZ
b7324c76bc fix(test,verifier): fix TestFFI (#1250) 2024-04-09 16:47:43 +08:00
Péter Garamvölgyi
6d6e98bd6e test(codecv1): add zkEVM standard challenge digest test vectors (#1213)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: Rohit Narurkar <rohit.narurkar@proton.me>
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
2024-04-08 11:31:41 +08:00
JayLiu
9e35ce0ab4 test: add testcontainers (#1248) 2024-04-08 00:10:14 +08:00
colin
b86ebaefaf fix(scroll): bump version (#1246) 2024-04-07 14:26:59 +08:00
JayLiu
78a4298eda test: add testcontainers (#1229)
Co-authored-by: liuyuecai <liuyuecai@360.cn>
2024-04-07 14:26:06 +08:00
Zhang Zhuo
49d8387714 chore: upgrade libzkp to v0.10.0rc3, fix sha256 rows (#1245) 2024-04-07 14:20:51 +08:00
georgehao
af2913903b feat(coordinator): optimize get_task sql (#1228)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-04-03 12:01:48 +08:00
Xi Lin
f8a7d70872 fix(contracts): fix number of non-skipped l1 messages (#1232)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2024-04-03 11:10:17 +08:00
Zhang Zhuo
790fc44b40 chore(libzkp): upgrade to v0.10.0l, fix keccak overflow (#1231)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
2024-04-03 10:54:46 +08:00
Zhang Zhuo
620c71b16d fix(proving): use ChunkInfo embedded inside ChunkProof for tx_bytes (#1230)
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
2024-04-03 09:24:06 +08:00
Zhang Zhuo
ed0e0e4c18 feat(libzkp): upgrade to v0.10.0k to support blob DA (#1212)
Co-authored-by: kunxian-xia <xiakunxian130@gmail.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <haoyu@protonmail.com>
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
2024-04-02 10:09:39 +08:00
JayLiu
d203033e13 test: add testcontainers (#1217)
Co-authored-by: liuyuecai <liuyuecai@360.cn>
2024-04-02 00:52:57 +08:00
colin
7d45926687 fix(codecv1): out of bound when keccak256 hash's pointBigInt.Bytes() length is less than 32 bytes (#1218) 2024-04-01 07:21:32 +08:00
Péter Garamvölgyi
5362e28f74 feat: rename fork to Bernoulli (#1210)
Co-authored-by: Thegaram <Thegaram@users.noreply.github.com>
2024-03-26 17:30:06 +00:00
colin
e8eb7ff8fd feat(rollup-relayer): support codecv1 (#1201)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2024-03-25 18:09:07 +00:00
georgehao
b01b5819da feat(coordinator): support assigning tasks of different versions to provers (#1182)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: Thegaram <Thegaram@users.noreply.github.com>
2024-03-25 16:51:36 +00:00
Péter Garamvölgyi
cb09024821 fix: add padding to challenge preimage for missing chunks (#1203)
Co-authored-by: Thegaram <Thegaram@users.noreply.github.com>
2024-03-25 12:38:37 +00:00
Xi Lin
8bd4277c13 feat(contracts): 4844 support (#1179) 2024-03-22 18:02:02 +08:00
329 changed files with 10094 additions and 5554 deletions

View File

@@ -85,9 +85,9 @@ jobs:
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.24/solc-static-linux'
name: 'solc'
version: '0.8.16'
version: '0.8.24'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites

View File

@@ -43,10 +43,10 @@ jobs:
- name: Setup LCOV
uses: hrishikesh-kadam/setup-lcov@v1
- name: Install Node.js 14
- name: Install Node.js 18
uses: actions/setup-node@v2
with:
node-version: '14'
node-version: '18'
- name: Get yarn cache directory path
id: yarn-cache-dir-path
@@ -73,13 +73,13 @@ jobs:
run: yarn install
- name: Compile with foundry
run: forge build
run: forge build --evm-version cancun
- name: Run foundry tests
run: forge test -vvv
run: forge test --evm-version cancun -vvv
- name: Run foundry coverage
run : forge coverage --report lcov
run : forge coverage --evm-version cancun --report lcov
- name : Prune coverage
run : lcov --rc branch_coverage=1 --remove ./lcov.info -o ./lcov.info.pruned 'src/mocks/*' 'src/test/*' 'scripts/*' 'node_modules/*' 'lib/*'
@@ -102,10 +102,10 @@ jobs:
with:
submodules: recursive
- name: Install Node.js 14
- name: Install Node.js 18
uses: actions/setup-node@v2
with:
node-version: '14'
node-version: '18'
- name: Get yarn cache directory path
id: yarn-cache-dir-path

View File

@@ -39,7 +39,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.20.x
go-version: 1.21.x
- name: Checkout code
uses: actions/checkout@v2
- name: Lint
@@ -54,7 +54,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.20.x
go-version: 1.21.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
@@ -95,15 +95,15 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.20.x
go-version: 1.21.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.24/solc-static-linux'
name: 'solc'
version: '0.8.16'
version: '0.8.24'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites

View File

@@ -78,9 +78,9 @@ jobs:
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.24/solc-static-linux'
name: 'solc'
version: '0.8.16'
version: '0.8.24'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites

View File

@@ -46,6 +46,7 @@ jobs:
with:
context: .
file: ./build/dockerfiles/event_watcher.Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
@@ -90,6 +91,7 @@ jobs:
with:
context: .
file: ./build/dockerfiles/gas_oracle.Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
@@ -134,6 +136,7 @@ jobs:
with:
context: .
file: ./build/dockerfiles/rollup_relayer.Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
@@ -178,6 +181,7 @@ jobs:
with:
context: .
file: ./build/dockerfiles/bridgehistoryapi-fetcher.Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
@@ -222,6 +226,7 @@ jobs:
with:
context: .
file: ./build/dockerfiles/bridgehistoryapi-api.Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
@@ -266,6 +271,7 @@ jobs:
with:
context: .
file: ./build/dockerfiles/coordinator-api.Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
@@ -310,6 +316,7 @@ jobs:
with:
context: .
file: ./build/dockerfiles/coordinator-cron.Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}

View File

@@ -8,7 +8,7 @@ require (
github.com/go-redis/redis/v8 v8.11.5
github.com/pressly/goose/v3 v3.16.0
github.com/prometheus/client_golang v1.16.0
github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935
github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e
github.com/stretchr/testify v1.9.0
github.com/urfave/cli/v2 v2.25.7
golang.org/x/sync v0.6.0
@@ -60,6 +60,7 @@ require (
github.com/holiman/uint256 v1.2.4 // indirect
github.com/huin/goupnp v1.3.0 // indirect
github.com/iden3/go-iden3-crypto v0.0.15 // indirect
github.com/jackc/pgx/v5 v5.5.4 // indirect
github.com/jackpal/go-nat-pmp v1.0.2 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect

View File

@@ -184,8 +184,8 @@ github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsI
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.5.0 h1:NxstgwndsTRy7eq9/kqYc/BZh5w2hHJV86wjvO+1xPw=
github.com/jackc/pgx/v5 v5.5.0/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA=
github.com/jackc/pgx/v5 v5.5.4 h1:Xp2aQS8uXButQdnCMWNmvx6UysWQQC+u1EoizjguY+8=
github.com/jackc/pgx/v5 v5.5.4/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A=
github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jackpal/go-nat-pmp v1.0.2 h1:KzKSgb7qkJvOUTqYl9/Hg/me3pWgBmERKrTGD7BdWus=
@@ -311,8 +311,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935 h1:bHBt6sillaT4o/9RjxkVX8pWwvEmu37uWBw4XbCjfzY=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935/go.mod h1:7Rz2bh9pn42rGuxjh51CG7HL9SKMG3ZugJkL3emdZx8=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e h1:FcoK0rykAWI+5E7cQM6ALRLd5CmjBTHRvJztRBH2xeM=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e/go.mod h1:7Rz2bh9pn42rGuxjh51CG7HL9SKMG3ZugJkL3emdZx8=
github.com/scroll-tech/zktrie v0.7.1 h1:NrmZNjuBzsbrKePqdHDG+t2cXnimbtezPAFS0+L9ElE=
github.com/scroll-tech/zktrie v0.7.1/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk=
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=

View File

@@ -23,6 +23,7 @@ type FetcherConfig struct {
DAIGatewayAddr string `json:"DAIGatewayAddr"`
USDCGatewayAddr string `json:"USDCGatewayAddr"`
LIDOGatewayAddr string `json:"LIDOGatewayAddr"`
PufferGatewayAddr string `json:"PufferGatewayAddr"`
ERC721GatewayAddr string `json:"ERC721GatewayAddr"`
ERC1155GatewayAddr string `json:"ERC1155GatewayAddr"`
ScrollChainAddr string `json:"ScrollChainAddr"`

View File

@@ -93,6 +93,11 @@ func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
gatewayList = append(gatewayList, common.HexToAddress(cfg.LIDOGatewayAddr))
}
if common.HexToAddress(cfg.PufferGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.PufferGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.PufferGatewayAddr))
}
log.Info("L1 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)
f := &L1FetcherLogic{

View File

@@ -85,7 +85,12 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
if common.HexToAddress(cfg.LIDOGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.LIDOGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.USDCGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.LIDOGatewayAddr))
}
if common.HexToAddress(cfg.PufferGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.PufferGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.PufferGatewayAddr))
}
log.Info("L2 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)

View File

@@ -1,62 +1,27 @@
package database
package database_test
import (
"context"
"errors"
"io"
"os"
"testing"
"time"
"github.com/mattn/go-colorable"
"github.com/mattn/go-isatty"
"github.com/scroll-tech/go-ethereum/log"
"github.com/stretchr/testify/assert"
"scroll-tech/common/docker"
"scroll-tech/common/database"
"scroll-tech/common/testcontainers"
"scroll-tech/common/version"
)
func TestGormLogger(t *testing.T) {
output := io.Writer(os.Stderr)
usecolor := (isatty.IsTerminal(os.Stderr.Fd()) || isatty.IsCygwinTerminal(os.Stderr.Fd())) && os.Getenv("TERM") != "dumb"
if usecolor {
output = colorable.NewColorableStderr()
}
ostream := log.StreamHandler(output, log.TerminalFormat(usecolor))
glogger := log.NewGlogHandler(ostream)
// Set log level
glogger.Verbosity(log.LvlTrace)
log.Root().SetHandler(glogger)
var gl gormLogger
gl.gethLogger = log.Root()
gl.Error(context.Background(), "test %s error:%v", "testError", errors.New("test error"))
gl.Warn(context.Background(), "test %s warn:%v", "testWarn", errors.New("test warn"))
gl.Info(context.Background(), "test %s warn:%v", "testInfo", errors.New("test info"))
gl.Trace(context.Background(), time.Now(), func() (string, int64) { return "test trace", 1 }, nil)
}
func TestDB(t *testing.T) {
version.Version = "v4.1.98-aaa-bbb-ccc"
base := docker.NewDockerApp()
base.RunDBImage(t)
dbCfg := &Config{
DSN: base.DBConfig.DSN,
DriverName: base.DBConfig.DriverName,
MaxOpenNum: base.DBConfig.MaxOpenNum,
MaxIdleNum: base.DBConfig.MaxIdleNum,
}
testApps := testcontainers.NewTestcontainerApps()
assert.NoError(t, testApps.StartPostgresContainer())
var err error
db, err := InitDB(dbCfg)
db, err := testApps.GetGormDBClient()
assert.NoError(t, err)
sqlDB, err := Ping(db)
sqlDB, err := database.Ping(db)
assert.NoError(t, err)
assert.NotNil(t, sqlDB)
assert.NoError(t, CloseDB(db))
assert.NoError(t, database.CloseDB(db))
}
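For context, the rewrite above moves the database test from the hand-rolled docker.App helper onto the shared testcontainers helper. A minimal sketch of how another test package could follow the same pattern (the package and test names are illustrative; the helper functions are the ones introduced in this compare):

package example_test

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"scroll-tech/common/database"
	"scroll-tech/common/testcontainers"
)

func TestWithPostgres(t *testing.T) {
	// Start a throwaway postgres container for this test run.
	testApps := testcontainers.NewTestcontainerApps()
	defer testApps.Free()
	assert.NoError(t, testApps.StartPostgresContainer())

	// Obtain a gorm client connected to the container.
	db, err := testApps.GetGormDBClient()
	assert.NoError(t, err)

	// The helpers from scroll-tech/common/database work unchanged against it.
	sqlDB, err := database.Ping(db)
	assert.NoError(t, err)
	assert.NotNil(t, sqlDB)
	assert.NoError(t, database.CloseDB(db))
}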

View File

@@ -0,0 +1,35 @@
package database
import (
"context"
"errors"
"io"
"os"
"testing"
"time"
"github.com/mattn/go-colorable"
"github.com/mattn/go-isatty"
"github.com/scroll-tech/go-ethereum/log"
)
func TestGormLogger(t *testing.T) {
output := io.Writer(os.Stderr)
usecolor := (isatty.IsTerminal(os.Stderr.Fd()) || isatty.IsCygwinTerminal(os.Stderr.Fd())) && os.Getenv("TERM") != "dumb"
if usecolor {
output = colorable.NewColorableStderr()
}
ostream := log.StreamHandler(output, log.TerminalFormat(usecolor))
glogger := log.NewGlogHandler(ostream)
// Set log level
glogger.Verbosity(log.LvlTrace)
log.Root().SetHandler(glogger)
var gl gormLogger
gl.gethLogger = log.Root()
gl.Error(context.Background(), "test %s error:%v", "testError", errors.New("test error"))
gl.Warn(context.Background(), "test %s warn:%v", "testWarn", errors.New("test warn"))
gl.Info(context.Background(), "test %s warn:%v", "testInfo", errors.New("test info"))
gl.Trace(context.Background(), time.Now(), func() (string, int64) { return "test trace", 1 }, nil)
}

View File

@@ -19,7 +19,7 @@ CAPELLA_FORK_VERSION: 0x20000092
MAX_WITHDRAWALS_PER_PAYLOAD: 16
# Deneb
DENEB_FORK_EPOCH: 1
DENEB_FORK_EPOCH: 0
DENEB_FORK_VERSION: 0x20000093
# Time parameters

View File

@@ -19,7 +19,7 @@ services:
command:
- testnet
- generate-genesis
- --fork=capella
- --fork=deneb
- --num-validators=64
- --genesis-time-delay=3
- --output-ssz=/data/consensus/genesis.ssz

View File

@@ -1,196 +0,0 @@
package docker
import (
"crypto/rand"
"database/sql"
"encoding/json"
"fmt"
"math/big"
"os"
"testing"
"time"
"github.com/jmoiron/sqlx"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/stretchr/testify/assert"
"scroll-tech/database"
"scroll-tech/common/utils"
)
var (
l1StartPort = 10000
l2StartPort = 20000
dbStartPort = 30000
)
// AppAPI app interface.
type AppAPI interface {
IsRunning() bool
WaitResult(t *testing.T, timeout time.Duration, keyword string) bool
RunApp(waitResult func() bool)
WaitExit()
ExpectWithTimeout(t *testing.T, parallel bool, timeout time.Duration, keyword string)
}
// App is collection struct of runtime docker images
type App struct {
L1gethImg GethImgInstance
L2gethImg GethImgInstance
DBImg ImgInstance
dbClient *sql.DB
DBConfig *database.DBConfig
DBConfigFile string
// common time stamp.
Timestamp int
}
// NewDockerApp returns new instance of dockerApp struct
func NewDockerApp() *App {
timestamp := time.Now().Nanosecond()
app := &App{
Timestamp: timestamp,
L1gethImg: newTestL1Docker(),
L2gethImg: newTestL2Docker(),
DBImg: newTestDBDocker("postgres"),
DBConfigFile: fmt.Sprintf("/tmp/%d_db-config.json", timestamp),
}
if err := app.mockDBConfig(); err != nil {
panic(err)
}
return app
}
// RunImages runs all images togather
func (b *App) RunImages(t *testing.T) {
b.RunDBImage(t)
b.RunL1Geth(t)
b.RunL2Geth(t)
}
// RunDBImage starts postgres docker container.
func (b *App) RunDBImage(t *testing.T) {
if b.DBImg.IsRunning() {
return
}
assert.NoError(t, b.DBImg.Start())
// try 5 times until the db is ready.
ok := utils.TryTimes(10, func() bool {
db, err := sqlx.Open("postgres", b.DBImg.Endpoint())
return err == nil && db != nil && db.Ping() == nil
})
assert.True(t, ok)
}
// Free clear all running images, double check and recycle docker container.
func (b *App) Free() {
if b.L1gethImg.IsRunning() {
_ = b.L1gethImg.Stop()
}
if b.L2gethImg.IsRunning() {
_ = b.L2gethImg.Stop()
}
if b.DBImg.IsRunning() {
_ = b.DBImg.Stop()
_ = os.Remove(b.DBConfigFile)
if !utils.IsNil(b.dbClient) {
_ = b.dbClient.Close()
b.dbClient = nil
}
}
}
// RunL1Geth starts l1geth docker container.
func (b *App) RunL1Geth(t *testing.T) {
if b.L1gethImg.IsRunning() {
return
}
assert.NoError(t, b.L1gethImg.Start())
}
// L1Client returns a ethclient by dialing running l1geth
func (b *App) L1Client() (*ethclient.Client, error) {
if utils.IsNil(b.L1gethImg) {
return nil, fmt.Errorf("l1 geth is not running")
}
client, err := ethclient.Dial(b.L1gethImg.Endpoint())
if err != nil {
return nil, err
}
return client, nil
}
// RunL2Geth starts l2geth docker container.
func (b *App) RunL2Geth(t *testing.T) {
if b.L2gethImg.IsRunning() {
return
}
assert.NoError(t, b.L2gethImg.Start())
}
// L2Client returns a ethclient by dialing running l2geth
func (b *App) L2Client() (*ethclient.Client, error) {
if utils.IsNil(b.L2gethImg) {
return nil, fmt.Errorf("l2 geth is not running")
}
client, err := ethclient.Dial(b.L2gethImg.Endpoint())
if err != nil {
return nil, err
}
return client, nil
}
// DBClient create and return *sql.DB instance.
func (b *App) DBClient(t *testing.T) *sql.DB {
if !utils.IsNil(b.dbClient) {
return b.dbClient
}
var (
cfg = b.DBConfig
err error
)
b.dbClient, err = sql.Open(cfg.DriverName, cfg.DSN)
assert.NoError(t, err)
b.dbClient.SetMaxOpenConns(cfg.MaxOpenNum)
b.dbClient.SetMaxIdleConns(cfg.MaxIdleNum)
assert.NoError(t, b.dbClient.Ping())
return b.dbClient
}
func (b *App) mockDBConfig() error {
b.DBConfig = &database.DBConfig{
DSN: "",
DriverName: "postgres",
MaxOpenNum: 200,
MaxIdleNum: 20,
}
if b.DBImg != nil {
b.DBConfig.DSN = b.DBImg.Endpoint()
}
data, err := json.Marshal(b.DBConfig)
if err != nil {
return err
}
return os.WriteFile(b.DBConfigFile, data, 0644) //nolint:gosec
}
func newTestL1Docker() GethImgInstance {
id, _ := rand.Int(rand.Reader, big.NewInt(2000))
return NewImgGeth("scroll_l1geth", "", "", 0, l1StartPort+int(id.Int64()))
}
func newTestL2Docker() GethImgInstance {
id, _ := rand.Int(rand.Reader, big.NewInt(2000))
return NewImgGeth("scroll_l2geth", "", "", 0, l2StartPort+int(id.Int64()))
}
func newTestDBDocker(driverName string) ImgInstance {
id, _ := rand.Int(rand.Reader, big.NewInt(2000))
return NewImgDB(driverName, "123456", "test_db", dbStartPort+int(id.Int64()))
}

View File

@@ -1,131 +0,0 @@
package docker
import (
"context"
"fmt"
"strings"
"time"
"github.com/docker/docker/api/types/container"
"scroll-tech/common/cmd"
"scroll-tech/common/utils"
)
// ImgDB the postgres image manager.
type ImgDB struct {
image string
name string
id string
dbName string
port int
password string
running bool
cmd *cmd.Cmd
}
// NewImgDB return postgres db img instance.
func NewImgDB(image, password, dbName string, port int) ImgInstance {
img := &ImgDB{
image: image,
name: fmt.Sprintf("%s-%s_%d", image, dbName, port),
password: password,
dbName: dbName,
port: port,
}
img.cmd = cmd.NewCmd("docker", img.prepare()...)
return img
}
// Start postgres db container.
func (i *ImgDB) Start() error {
id := GetContainerID(i.name)
if id != "" {
return fmt.Errorf("container already exist, name: %s", i.name)
}
i.running = i.isOk()
if !i.running {
_ = i.Stop()
return fmt.Errorf("failed to start image: %s", i.image)
}
return nil
}
// Stop the container.
func (i *ImgDB) Stop() error {
if !i.running {
return nil
}
i.running = false
ctx := context.Background()
// stop the running container.
if i.id == "" {
i.id = GetContainerID(i.name)
}
timeoutSec := 3
timeout := container.StopOptions{
Timeout: &timeoutSec,
}
if err := cli.ContainerStop(ctx, i.id, timeout); err != nil {
return err
}
// remove the stopped container.
return cli.ContainerRemove(ctx, i.id, container.RemoveOptions{})
}
// Endpoint return the dsn.
func (i *ImgDB) Endpoint() string {
return fmt.Sprintf("postgres://postgres:%s@localhost:%d/%s?sslmode=disable", i.password, i.port, i.dbName)
}
// IsRunning returns docker container's running status.
func (i *ImgDB) IsRunning() bool {
return i.running
}
func (i *ImgDB) prepare() []string {
cmd := []string{"run", "--rm", "--name", i.name, "-p", fmt.Sprintf("%d:5432", i.port)}
envs := []string{
"-e", "POSTGRES_PASSWORD=" + i.password,
"-e", fmt.Sprintf("POSTGRES_DB=%s", i.dbName),
}
cmd = append(cmd, envs...)
return append(cmd, i.image)
}
func (i *ImgDB) isOk() bool {
keyword := "database system is ready to accept connections"
okCh := make(chan struct{}, 1)
i.cmd.RegistFunc(keyword, func(buf string) {
if strings.Contains(buf, keyword) {
select {
case okCh <- struct{}{}:
default:
return
}
}
})
defer i.cmd.UnRegistFunc(keyword)
// Start cmd in parallel.
i.cmd.RunCmd(true)
select {
case <-okCh:
utils.TryTimes(20, func() bool {
i.id = GetContainerID(i.name)
return i.id != ""
})
case err := <-i.cmd.ErrChan:
if err != nil {
fmt.Printf("failed to start %s, err: %v\n", i.name, err)
}
case <-time.After(time.Second * 20):
return false
}
return i.id != ""
}

View File

@@ -1,174 +0,0 @@
package docker
import (
"context"
"fmt"
"math/big"
"strconv"
"strings"
"time"
"github.com/docker/docker/api/types/container"
"github.com/scroll-tech/go-ethereum/ethclient"
"scroll-tech/common/cmd"
"scroll-tech/common/utils"
)
// ImgGeth the geth image manager include l1geth and l2geth.
type ImgGeth struct {
image string
name string
id string
volume string
ipcPath string
httpPort int
wsPort int
chainID *big.Int
running bool
cmd *cmd.Cmd
}
// NewImgGeth return geth img instance.
func NewImgGeth(image, volume, ipc string, hPort, wPort int) GethImgInstance {
img := &ImgGeth{
image: image,
name: fmt.Sprintf("%s-%d", image, time.Now().Nanosecond()),
volume: volume,
ipcPath: ipc,
httpPort: hPort,
wsPort: wPort,
}
img.cmd = cmd.NewCmd("docker", img.params()...)
return img
}
// Start run image and check if it is running healthily.
func (i *ImgGeth) Start() error {
id := GetContainerID(i.name)
if id != "" {
return fmt.Errorf("container already exist, name: %s", i.name)
}
i.running = i.isOk()
if !i.running {
_ = i.Stop()
return fmt.Errorf("failed to start image: %s", i.image)
}
// try 10 times to get chainID until is ok.
utils.TryTimes(10, func() bool {
client, err := ethclient.Dial(i.Endpoint())
if err == nil && client != nil {
i.chainID, err = client.ChainID(context.Background())
return err == nil && i.chainID != nil
}
return false
})
return nil
}
// IsRunning returns docker container's running status.
func (i *ImgGeth) IsRunning() bool {
return i.running
}
// Endpoint return the connection endpoint.
func (i *ImgGeth) Endpoint() string {
switch true {
case i.httpPort != 0:
return fmt.Sprintf("http://127.0.0.1:%d", i.httpPort)
case i.wsPort != 0:
return fmt.Sprintf("ws://127.0.0.1:%d", i.wsPort)
default:
return i.ipcPath
}
}
// ChainID return chainID.
func (i *ImgGeth) ChainID() *big.Int {
return i.chainID
}
func (i *ImgGeth) isOk() bool {
keyword := "WebSocket enabled"
okCh := make(chan struct{}, 1)
i.cmd.RegistFunc(keyword, func(buf string) {
if strings.Contains(buf, keyword) {
select {
case okCh <- struct{}{}:
default:
return
}
}
})
defer i.cmd.UnRegistFunc(keyword)
// Start cmd in parallel.
i.cmd.RunCmd(true)
select {
case <-okCh:
utils.TryTimes(20, func() bool {
i.id = GetContainerID(i.name)
return i.id != ""
})
case err := <-i.cmd.ErrChan:
if err != nil {
fmt.Printf("failed to start %s, err: %v\n", i.name, err)
}
case <-time.After(time.Second * 10):
return false
}
return i.id != ""
}
// Stop the docker container.
func (i *ImgGeth) Stop() error {
if !i.running {
return nil
}
i.running = false
ctx := context.Background()
// check if container is running, stop the running container.
id := GetContainerID(i.name)
if id != "" {
timeoutSec := 3
timeout := container.StopOptions{
Timeout: &timeoutSec,
}
if err := cli.ContainerStop(ctx, id, timeout); err != nil {
return err
}
i.id = id
}
// remove the stopped container.
return cli.ContainerRemove(ctx, i.id, container.RemoveOptions{})
}
func (i *ImgGeth) params() []string {
cmds := []string{"run", "--rm", "--name", i.name}
var ports []string
if i.httpPort != 0 {
ports = append(ports, []string{"-p", strconv.Itoa(i.httpPort) + ":8545"}...)
}
if i.wsPort != 0 {
ports = append(ports, []string{"-p", strconv.Itoa(i.wsPort) + ":8546"}...)
}
var envs []string
if i.ipcPath != "" {
envs = append(envs, []string{"-e", fmt.Sprintf("IPC_PATH=%s", i.ipcPath)}...)
}
if i.volume != "" {
cmds = append(cmds, []string{"-v", fmt.Sprintf("%s:%s", i.volume, i.volume)}...)
}
cmds = append(cmds, ports...)
cmds = append(cmds, envs...)
return append(cmds, i.image)
}

View File

@@ -1,54 +0,0 @@
package docker_test
import (
"context"
"testing"
"github.com/jmoiron/sqlx"
_ "github.com/lib/pq" //nolint:golint
"github.com/stretchr/testify/assert"
"scroll-tech/common/docker"
)
var (
base *docker.App
)
func TestMain(m *testing.M) {
base = docker.NewDockerApp()
m.Run()
base.Free()
}
func TestDB(t *testing.T) {
base.RunDBImage(t)
db, err := sqlx.Open("postgres", base.DBImg.Endpoint())
assert.NoError(t, err)
assert.NoError(t, db.Ping())
}
func TestL1Geth(t *testing.T) {
base.RunL1Geth(t)
client, err := base.L1Client()
assert.NoError(t, err)
chainID, err := client.ChainID(context.Background())
assert.NoError(t, err)
t.Logf("chainId: %s", chainID.String())
}
func TestL2Geth(t *testing.T) {
base.RunL2Geth(t)
client, err := base.L2Client()
assert.NoError(t, err)
chainID, err := client.ChainID(context.Background())
assert.NoError(t, err)
t.Logf("chainId: %s", chainID.String())
}

View File

@@ -15,7 +15,7 @@
"archimedesBlock": 0,
"shanghaiBlock": 0,
"clique": {
"period": 3,
"period": 1,
"epoch": 30000
},
"scroll": {

View File

@@ -1,6 +1,7 @@
package forks
import (
"math"
"math/big"
"sort"
@@ -8,30 +9,48 @@ import (
)
// CollectSortedForkHeights returns a sorted set of block numbers that one or more forks are activated on
func CollectSortedForkHeights(config *params.ChainConfig) ([]uint64, map[uint64]bool) {
forkHeightsMap := make(map[uint64]bool)
for _, fork := range []*big.Int{
config.HomesteadBlock,
config.DAOForkBlock,
config.EIP150Block,
config.EIP155Block,
config.EIP158Block,
config.ByzantiumBlock,
config.ConstantinopleBlock,
config.PetersburgBlock,
config.IstanbulBlock,
config.MuirGlacierBlock,
config.BerlinBlock,
config.LondonBlock,
config.ArrowGlacierBlock,
config.ArchimedesBlock,
config.ShanghaiBlock,
func CollectSortedForkHeights(config *params.ChainConfig) ([]uint64, map[uint64]bool, map[string]uint64) {
type nameFork struct {
name string
block *big.Int
}
forkHeightNameMap := make(map[uint64]string)
for _, fork := range []nameFork{
{name: "homestead", block: config.HomesteadBlock},
{name: "daoFork", block: config.DAOForkBlock},
{name: "eip150", block: config.EIP150Block},
{name: "eip155", block: config.EIP155Block},
{name: "eip158", block: config.EIP158Block},
{name: "byzantium", block: config.ByzantiumBlock},
{name: "constantinople", block: config.ConstantinopleBlock},
{name: "petersburg", block: config.PetersburgBlock},
{name: "istanbul", block: config.IstanbulBlock},
{name: "muirGlacier", block: config.MuirGlacierBlock},
{name: "berlin", block: config.BerlinBlock},
{name: "london", block: config.LondonBlock},
{name: "arrowGlacier", block: config.ArrowGlacierBlock},
{name: "archimedes", block: config.ArchimedesBlock},
{name: "shanghai", block: config.ShanghaiBlock},
{name: "bernoulli", block: config.BernoulliBlock},
{name: "curie", block: config.CurieBlock},
} {
if fork == nil {
if fork.block == nil {
continue
} else if height := fork.Uint64(); height != 0 {
forkHeightsMap[height] = true
}
height := fork.block.Uint64()
// only keep the latest fork at each height, discard the rest
forkHeightNameMap[height] = fork.name
}
forkHeightsMap := make(map[uint64]bool)
forkNameHeightMap := make(map[string]uint64)
for height, name := range forkHeightNameMap {
forkHeightsMap[height] = true
forkNameHeightMap[name] = height
}
var forkHeights []uint64
@@ -41,7 +60,7 @@ func CollectSortedForkHeights(config *params.ChainConfig) ([]uint64, map[uint64]
sort.Slice(forkHeights, func(i, j int) bool {
return forkHeights[i] < forkHeights[j]
})
return forkHeights, forkHeightsMap
return forkHeights, forkHeightsMap, forkNameHeightMap
}
// BlocksUntilFork returns the number of blocks until the next fork
@@ -54,3 +73,17 @@ func BlocksUntilFork(blockHeight uint64, forkHeights []uint64) uint64 {
}
return 0
}
// BlockRange returns the block range of the hard fork
// Need to ensure the forkHeights slice is sorted in increasing order
func BlockRange(currentForkHeight uint64, forkHeights []uint64) (from, to uint64) {
to = math.MaxInt64
for _, height := range forkHeights {
if currentForkHeight < height {
to = height
return
}
from = height
}
return
}

View File

@@ -1,6 +1,7 @@
package forks
import (
"math"
"math/big"
"testing"
@@ -9,20 +10,27 @@ import (
)
func TestCollectSortedForkBlocks(t *testing.T) {
l, m := CollectSortedForkHeights(&params.ChainConfig{
EIP155Block: big.NewInt(4),
EIP158Block: big.NewInt(3),
ByzantiumBlock: big.NewInt(3),
ConstantinopleBlock: big.NewInt(0),
l, m, n := CollectSortedForkHeights(&params.ChainConfig{
ArchimedesBlock: big.NewInt(0),
ShanghaiBlock: big.NewInt(3),
BernoulliBlock: big.NewInt(3),
CurieBlock: big.NewInt(4),
})
require.Equal(t, l, []uint64{
0,
3,
4,
})
require.Equal(t, map[uint64]bool{
3: true,
4: true,
0: true,
}, m)
require.Equal(t, map[string]uint64{
"archimedes": 0,
"bernoulli": 3,
"curie": 4,
}, n)
}
func TestBlocksUntilFork(t *testing.T) {
@@ -64,3 +72,71 @@ func TestBlocksUntilFork(t *testing.T) {
})
}
}
func TestBlockRange(t *testing.T) {
tests := []struct {
name string
forkHeight uint64
forkHeights []uint64
expectedFrom uint64
expectedTo uint64
}{
{
name: "ToInfinite",
forkHeight: 300,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 300,
expectedTo: math.MaxInt64,
},
{
name: "To300",
forkHeight: 200,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 200,
expectedTo: 300,
},
{
name: "To200",
forkHeight: 100,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 100,
expectedTo: 200,
},
{
name: "To100",
forkHeight: 0,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 0,
expectedTo: 100,
},
{
name: "To200-1",
forkHeight: 100,
forkHeights: []uint64{100, 200},
expectedFrom: 100,
expectedTo: 200,
},
{
name: "To2",
forkHeight: 1,
forkHeights: []uint64{1, 2},
expectedFrom: 1,
expectedTo: 2,
},
{
name: "ToInfinite-1",
forkHeight: 0,
forkHeights: []uint64{0},
expectedFrom: 0,
expectedTo: math.MaxInt64,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
from, to := BlockRange(test.forkHeight, test.forkHeights)
require.Equal(t, test.expectedFrom, from)
require.Equal(t, test.expectedTo, to)
})
}
}
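Taken together, CollectSortedForkHeights now also reports a fork-name-to-height map, and BlockRange turns a fork's activation height into a from/to pair (its own activation height up to the next fork's, or math.MaxInt64 for the latest fork). A small sketch of how a caller might combine the two (import paths are assumed from the module layout in this compare):

package main

import (
	"fmt"
	"math/big"

	"github.com/scroll-tech/go-ethereum/params"

	"scroll-tech/common/forks"
)

func main() {
	// Example chain config: bernoulli activates at block 3, curie at block 4.
	config := &params.ChainConfig{
		ArchimedesBlock: big.NewInt(0),
		ShanghaiBlock:   big.NewInt(3),
		BernoulliBlock:  big.NewInt(3),
		CurieBlock:      big.NewInt(4),
	}

	heights, _, nameToHeight := forks.CollectSortedForkHeights(config)

	// Shanghai and bernoulli share height 3, so only the latest ("bernoulli") is kept.
	from, to := forks.BlockRange(nameToHeight["bernoulli"], heights)
	fmt.Println(from, to) // 3 4: bernoulli governs blocks from 3 until curie takes over at 4
}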

View File

@@ -9,17 +9,16 @@ require (
github.com/docker/docker v25.0.3+incompatible
github.com/gin-contrib/pprof v1.4.0
github.com/gin-gonic/gin v1.9.1
github.com/jmoiron/sqlx v1.3.5
github.com/lib/pq v1.10.9
github.com/mattn/go-colorable v0.1.13
github.com/mattn/go-isatty v0.0.20
github.com/modern-go/reflect2 v1.0.2
github.com/orcaman/concurrent-map v1.0.0
github.com/prometheus/client_golang v1.16.0
github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935
github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e
github.com/stretchr/testify v1.9.0
github.com/testcontainers/testcontainers-go v0.29.1
github.com/testcontainers/testcontainers-go/modules/compose v0.29.1
github.com/testcontainers/testcontainers-go v0.28.0
github.com/testcontainers/testcontainers-go/modules/compose v0.28.0
github.com/testcontainers/testcontainers-go/modules/postgres v0.28.0
github.com/urfave/cli/v2 v2.25.7
gorm.io/driver/postgres v1.5.0
gorm.io/gorm v1.25.5
@@ -127,7 +126,7 @@ require (
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/pgx/v5 v5.5.0 // indirect
github.com/jackc/pgx/v5 v5.5.4 // indirect
github.com/jackc/puddle/v2 v2.2.1 // indirect
github.com/jackpal/go-nat-pmp v1.0.2 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
@@ -144,7 +143,6 @@ require (
github.com/mailru/easyjson v0.7.6 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mattn/go-shellwords v1.0.12 // indirect
github.com/mattn/go-sqlite3 v1.14.16 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b // indirect
github.com/miekg/pkcs11 v1.1.1 // indirect

View File

@@ -268,7 +268,6 @@ github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXS
github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24=
github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-sql-driver/mysql v1.3.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.7.1 h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrtU8EI=
github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
@@ -382,8 +381,8 @@ github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5ey
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.3.0/go.mod h1:t3JDKnCBlYIc0ewLF0Q7B8MXmoIaBOZj/ic7iHozM/8=
github.com/jackc/pgx/v5 v5.5.0 h1:NxstgwndsTRy7eq9/kqYc/BZh5w2hHJV86wjvO+1xPw=
github.com/jackc/pgx/v5 v5.5.0/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA=
github.com/jackc/pgx/v5 v5.5.4 h1:Xp2aQS8uXButQdnCMWNmvx6UysWQQC+u1EoizjguY+8=
github.com/jackc/pgx/v5 v5.5.4/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A=
github.com/jackc/puddle/v2 v2.2.0/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
@@ -400,8 +399,6 @@ github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4=
github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
@@ -443,7 +440,6 @@ github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ic
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
github.com/lib/pq v0.0.0-20150723085316-0dad96c0b94f/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
@@ -469,9 +465,6 @@ github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh
github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk=
github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
github.com/mattn/go-sqlite3 v1.6.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo=
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
@@ -614,8 +607,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935 h1:bHBt6sillaT4o/9RjxkVX8pWwvEmu37uWBw4XbCjfzY=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935/go.mod h1:7Rz2bh9pn42rGuxjh51CG7HL9SKMG3ZugJkL3emdZx8=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e h1:FcoK0rykAWI+5E7cQM6ALRLd5CmjBTHRvJztRBH2xeM=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e/go.mod h1:7Rz2bh9pn42rGuxjh51CG7HL9SKMG3ZugJkL3emdZx8=
github.com/scroll-tech/zktrie v0.7.1 h1:NrmZNjuBzsbrKePqdHDG+t2cXnimbtezPAFS0+L9ElE=
github.com/scroll-tech/zktrie v0.7.1/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk=
github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE=
@@ -678,10 +671,12 @@ github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2 h1:wh1wzwAhZ
github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw=
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY=
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc=
github.com/testcontainers/testcontainers-go v0.29.1 h1:z8kxdFlovA2y97RWx98v/TQ+tR+SXZm6p35M+xB92zk=
github.com/testcontainers/testcontainers-go v0.29.1/go.mod h1:SnKnKQav8UcgtKqjp/AD8bE1MqZm+3TDb/B8crE3XnI=
github.com/testcontainers/testcontainers-go/modules/compose v0.29.1 h1:47ipPM+s+ltCDOP3Sa1j95AkNb+z+WGiHLDbLU8ixuc=
github.com/testcontainers/testcontainers-go/modules/compose v0.29.1/go.mod h1:Sqh+Ef2ESdbJQjTJl57UOkEHkOc7gXvQLg1b5xh6f1Y=
github.com/testcontainers/testcontainers-go v0.28.0 h1:1HLm9qm+J5VikzFDYhOd+Zw12NtOl+8drH2E8nTY1r8=
github.com/testcontainers/testcontainers-go v0.28.0/go.mod h1:COlDpUXbwW3owtpMkEB1zo9gwb1CoKVKlyrVPejF4AU=
github.com/testcontainers/testcontainers-go/modules/compose v0.28.0 h1:QOCeTYZIYixg796Ik60MOaeMgpAKPbQd5pJOdTrftyg=
github.com/testcontainers/testcontainers-go/modules/compose v0.28.0/go.mod h1:lShXm8oldlLck3ltA5u+ShSvUnZ+wiNxwpp8wAQGZ1Y=
github.com/testcontainers/testcontainers-go/modules/postgres v0.28.0 h1:ff0s4JdYIdNAVSi/SrpN2Pdt1f+IjIw3AKjbHau8Un4=
github.com/testcontainers/testcontainers-go/modules/postgres v0.28.0/go.mod h1:fXgcYpbyrduNdiz2qRZuYkmvqLnEqsjbQiBNYH1ystI=
github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c=
github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw=
github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA=

File diff suppressed because it is too large

View File

@@ -8,26 +8,30 @@ edition = "2021"
crate-type = ["cdylib"]
[patch.crates-io]
gobuild = { git = "https://github.com/scroll-tech/gobuild.git" }
halo2curves = { git = "https://github.com/scroll-tech/halo2curves", branch = "v0.1.0" }
ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-signers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
#ethers-etherscan = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
#ethers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
[patch."https://github.com/privacy-scaling-explorations/halo2.git"]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
[patch."https://github.com/privacy-scaling-explorations/poseidon.git"]
poseidon = { git = "https://github.com/scroll-tech/poseidon.git", branch = "scroll-dev-0220" }
[patch."https://github.com/privacy-scaling-explorations/halo2wrong.git"]
halo2wrong = { git = "https://github.com/scroll-tech/halo2wrong.git", branch = "halo2-ecc-snark-verifier-0323" }
maingate = { git = "https://github.com/scroll-tech/halo2wrong", branch = "halo2-ecc-snark-verifier-0323" }
[patch."https://github.com/privacy-scaling-explorations/halo2curves.git"]
halo2curves = { git = "https://github.com/scroll-tech/halo2curves.git", branch = "0.3.1-derive-serde" }
poseidon = { git = "https://github.com/scroll-tech/poseidon.git", branch = "main" }
[patch."https://github.com/privacy-scaling-explorations/bls12_381"]
bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/impl_scalar_field" }
[dependencies]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.9.9", default-features = false, features = ["parallel_syn", "scroll", "shanghai", "strict-ccc"] }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.10.3", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
base64 = "0.13.0"
env_logger = "0.9.0"
libc = "0.2"
log = "0.4"
once_cell = "1.8.0"
once_cell = "1.19"
serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0.66"

View File

@@ -1 +1 @@
nightly-2022-12-10
nightly-2023-12-03

View File

@@ -12,6 +12,7 @@ use prover::{
utils::{chunk_trace_to_witness_block, init_env_and_log},
BatchProof, BlockTrace, ChunkHash, ChunkProof,
};
use snark_verifier_sdk::verify_evm_calldata;
use std::{cell::OnceCell, env, ptr::null};
static mut PROVER: OnceCell<Prover> = OnceCell::new();
@@ -119,7 +120,7 @@ pub unsafe extern "C" fn gen_batch_proof(
let chunk_hashes_proofs = chunk_hashes
.into_iter()
.zip(chunk_proofs.into_iter())
.zip(chunk_proofs)
.collect();
let proof = PROVER
@@ -148,11 +149,33 @@ pub unsafe extern "C" fn gen_batch_proof(
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_batch_proof(proof: *const c_char) -> c_char {
pub unsafe extern "C" fn verify_batch_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
let proof = c_char_to_vec(proof);
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
let verified = panic_catch(|| VERIFIER.get().unwrap().verify_agg_evm_proof(proof));
let fork_name_str = c_char_to_str(fork_name);
let fork_id = match fork_name_str {
"" => 0,
"shanghai" => 0,
"bernoulli" => 1,
_ => {
log::warn!("unexpected fork_name {fork_name_str}, treated as bernoulli");
1
}
};
let verified = panic_catch(|| {
if fork_id == 0 {
// before upgrade#2(EIP4844)
verify_evm_calldata(
include_bytes!("evm_verifier_fork_1.bin").to_vec(),
proof.calldata(),
)
} else {
VERIFIER.get().unwrap().verify_agg_evm_proof(proof)
}
});
verified.unwrap_or(false) as c_char
}
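verify_batch_proof now takes the hard-fork name so it can route pre-EIP-4844 proofs to the bundled EVM-calldata verifier (fork id 0) and everything else to the current aggregation verifier (fork id 1). A hypothetical Go-side helper sketching the same name-to-id mapping the Rust code applies (the function and package are illustrative, not part of this change):

package main

import (
	"fmt"
	"log"
)

// forkNameToID mirrors the match in verify_batch_proof above:
// "" and "shanghai" select the legacy verifier path (0), "bernoulli" selects the
// new path (1), and unknown names fall back to 1 with a warning.
func forkNameToID(forkName string) int {
	switch forkName {
	case "", "shanghai":
		return 0
	case "bernoulli":
		return 1
	default:
		log.Printf("unexpected fork_name %s, treated as bernoulli", forkName)
		return 1
	}
}

func main() {
	fmt.Println(forkNameToID("shanghai"))  // 0
	fmt.Println(forkNameToID("bernoulli")) // 1
}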

Binary file not shown.

View File

@@ -1,5 +1,3 @@
#![feature(once_cell)]
mod batch;
mod chunk;
mod types;

View File

@@ -3,7 +3,7 @@ void init_batch_verifier(char* params_dir, char* assets_dir);
char* get_batch_vk();
char* check_chunk_proofs(char* chunk_proofs);
char* gen_batch_proof(char* chunk_hashes, char* chunk_proofs);
char verify_batch_proof(char* proof);
char verify_batch_proof(char* proof, char* fork_name);
void init_chunk_prover(char* params_dir, char* assets_dir);
void init_chunk_verifier(char* params_dir, char* assets_dir);

View File

@@ -0,0 +1,127 @@
package testcontainers
import (
"context"
"crypto/rand"
"fmt"
"math/big"
"os"
"path/filepath"
"time"
"github.com/cloudflare/cfssl/log"
"github.com/scroll-tech/go-ethereum/ethclient"
tc "github.com/testcontainers/testcontainers-go/modules/compose"
"github.com/testcontainers/testcontainers-go/wait"
)
// PoSL1TestEnv represents the config needed to test in PoS Layer 1.
type PoSL1TestEnv struct {
dockerComposeFile string
compose tc.ComposeStack
gethHTTPPort int
hostPath string
}
// NewPoSL1TestEnv creates and initializes a new instance of PoSL1TestEnv with a random HTTP port.
func NewPoSL1TestEnv() (*PoSL1TestEnv, error) {
rootDir, err := findProjectRootDir()
if err != nil {
return nil, fmt.Errorf("failed to find project root directory: %v", err)
}
hostPath, found := os.LookupEnv("HOST_PATH")
if !found {
hostPath = ""
}
rnd, err := rand.Int(rand.Reader, big.NewInt(65536-1024))
if err != nil {
return nil, fmt.Errorf("failed to generate a random: %v", err)
}
gethHTTPPort := int(rnd.Int64()) + 1024
if err := os.Setenv("GETH_HTTP_PORT", fmt.Sprintf("%d", gethHTTPPort)); err != nil {
return nil, fmt.Errorf("failed to set GETH_HTTP_PORT: %v", err)
}
return &PoSL1TestEnv{
dockerComposeFile: filepath.Join(rootDir, "common", "docker-compose", "l1", "docker-compose.yml"),
gethHTTPPort: gethHTTPPort,
hostPath: hostPath,
}, nil
}
// Start starts the PoS L1 test environment by running the associated Docker Compose configuration.
func (e *PoSL1TestEnv) Start() error {
var err error
e.compose, err = tc.NewDockerCompose([]string{e.dockerComposeFile}...)
if err != nil {
return fmt.Errorf("failed to create docker compose: %w", err)
}
env := map[string]string{
"GETH_HTTP_PORT": fmt.Sprintf("%d", e.gethHTTPPort),
}
if e.hostPath != "" {
env["HOST_PATH"] = e.hostPath
}
if err = e.compose.WaitForService("geth", wait.NewHTTPStrategy("/").WithPort("8545/tcp").WithStartupTimeout(15*time.Second)).WithEnv(env).Up(context.Background()); err != nil {
if errStop := e.Stop(); errStop != nil {
log.Error("failed to stop PoS L1 test environment", "err", errStop)
}
return fmt.Errorf("failed to start PoS L1 test environment: %w", err)
}
return nil
}
// Stop stops the PoS L1 test environment by stopping and removing the associated Docker Compose services.
func (e *PoSL1TestEnv) Stop() error {
if e.compose != nil {
if err := e.compose.Down(context.Background(), tc.RemoveOrphans(true), tc.RemoveVolumes(true), tc.RemoveImagesLocal); err != nil {
return fmt.Errorf("failed to stop PoS L1 test environment: %w", err)
}
}
return nil
}
// Endpoint returns the HTTP endpoint for the PoS L1 test environment.
func (e *PoSL1TestEnv) Endpoint() string {
return fmt.Sprintf("http://127.0.0.1:%d", e.gethHTTPPort)
}
// L1Client returns an ethclient by dialing the running PoS L1 test environment
func (e *PoSL1TestEnv) L1Client() (*ethclient.Client, error) {
if e == nil {
return nil, fmt.Errorf("PoS L1 test environment is not initialized")
}
client, err := ethclient.Dial(e.Endpoint())
if err != nil {
return nil, fmt.Errorf("failed to dial PoS L1 test environment: %w", err)
}
return client, nil
}
func findProjectRootDir() (string, error) {
currentDir, err := os.Getwd()
if err != nil {
return "", fmt.Errorf("failed to get working directory: %w", err)
}
for {
_, err := os.Stat(filepath.Join(currentDir, "go.work"))
if err == nil {
return currentDir, nil
}
parentDir := filepath.Dir(currentDir)
if parentDir == currentDir {
return "", fmt.Errorf("go.work file not found in any parent directory")
}
currentDir = parentDir
}
}
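The new PoSL1TestEnv wraps the docker-compose based PoS L1 devnet behind a small lifecycle API. A minimal usage sketch, assuming the package is imported as scroll-tech/common/testcontainers (the test name is illustrative):

package example_test

import (
	"context"
	"testing"

	"github.com/stretchr/testify/assert"

	"scroll-tech/common/testcontainers"
)

func TestPoSL1Devnet(t *testing.T) {
	env, err := testcontainers.NewPoSL1TestEnv()
	assert.NoError(t, err)

	// Start brings up the compose stack; Stop tears it down and removes volumes.
	assert.NoError(t, env.Start())
	defer func() {
		assert.NoError(t, env.Stop())
	}()

	// Dial the devnet over HTTP and read its chain id.
	client, err := env.L1Client()
	assert.NoError(t, err)
	chainID, err := client.ChainID(context.Background())
	assert.NoError(t, err)
	t.Logf("chainId: %s", chainID.String())
}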

View File

@@ -0,0 +1,200 @@
package testcontainers
import (
"context"
"fmt"
"log"
"time"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/testcontainers/testcontainers-go"
"github.com/testcontainers/testcontainers-go/modules/postgres"
"github.com/testcontainers/testcontainers-go/wait"
"gorm.io/gorm"
"scroll-tech/common/database"
)
// TestcontainerApps testcontainers struct
type TestcontainerApps struct {
postgresContainer *postgres.PostgresContainer
l1GethContainer *testcontainers.DockerContainer
l2GethContainer *testcontainers.DockerContainer
// common time stamp in nanoseconds.
Timestamp int
}
// NewTestcontainerApps returns a new instance of the TestcontainerApps struct
func NewTestcontainerApps() *TestcontainerApps {
timestamp := time.Now().Nanosecond()
return &TestcontainerApps{
Timestamp: timestamp,
}
}
// StartPostgresContainer starts a postgres container
func (t *TestcontainerApps) StartPostgresContainer() error {
if t.postgresContainer != nil && t.postgresContainer.IsRunning() {
return nil
}
postgresContainer, err := postgres.RunContainer(context.Background(),
testcontainers.WithImage("postgres"),
postgres.WithDatabase("test_db"),
postgres.WithPassword("123456"),
testcontainers.WithWaitStrategy(
wait.ForLog("database system is ready to accept connections").WithOccurrence(2).WithStartupTimeout(5*time.Second)),
)
if err != nil {
log.Printf("failed to start postgres container: %s", err)
return err
}
t.postgresContainer = postgresContainer
return nil
}
// StartL1GethContainer starts a L1Geth container
func (t *TestcontainerApps) StartL1GethContainer() error {
if t.l1GethContainer != nil && t.l1GethContainer.IsRunning() {
return nil
}
req := testcontainers.ContainerRequest{
Image: "scroll_l1geth",
ExposedPorts: []string{"8546/tcp", "8545/tcp"},
WaitingFor: wait.ForAll(
wait.ForListeningPort("8546").WithStartupTimeout(100*time.Second),
wait.ForListeningPort("8545").WithStartupTimeout(100*time.Second),
),
Cmd: []string{"--log.debug", "ANY"},
}
genericContainerReq := testcontainers.GenericContainerRequest{
ContainerRequest: req,
Started: true,
}
container, err := testcontainers.GenericContainer(context.Background(), genericContainerReq)
if err != nil {
log.Printf("failed to start scroll_l1geth container: %s", err)
return err
}
t.l1GethContainer, _ = container.(*testcontainers.DockerContainer)
return nil
}
// StartL2GethContainer starts an L2Geth container
func (t *TestcontainerApps) StartL2GethContainer() error {
if t.l2GethContainer != nil && t.l2GethContainer.IsRunning() {
return nil
}
req := testcontainers.ContainerRequest{
Image: "scroll_l2geth",
ExposedPorts: []string{"8546/tcp", "8545/tcp"},
WaitingFor: wait.ForAll(
wait.ForListeningPort("8546").WithStartupTimeout(100*time.Second),
wait.ForListeningPort("8545").WithStartupTimeout(100*time.Second),
),
}
genericContainerReq := testcontainers.GenericContainerRequest{
ContainerRequest: req,
Started: true,
}
container, err := testcontainers.GenericContainer(context.Background(), genericContainerReq)
if err != nil {
log.Printf("failed to start scroll_l2geth container: %s", err)
return err
}
t.l2GethContainer, _ = container.(*testcontainers.DockerContainer)
return nil
}
// GetDBEndPoint returns the endpoint of the running postgres container
func (t *TestcontainerApps) GetDBEndPoint() (string, error) {
if t.postgresContainer == nil || !t.postgresContainer.IsRunning() {
return "", fmt.Errorf("postgres is not running")
}
return t.postgresContainer.ConnectionString(context.Background(), "sslmode=disable")
}
// GetL1GethEndPoint returns the endpoint of the running L1Geth container
func (t *TestcontainerApps) GetL1GethEndPoint() (string, error) {
if t.l1GethContainer == nil || !t.l1GethContainer.IsRunning() {
return "", fmt.Errorf("l1 geth is not running")
}
endpoint, err := t.l1GethContainer.PortEndpoint(context.Background(), "8546/tcp", "ws")
if err != nil {
return "", err
}
return endpoint, nil
}
// GetL2GethEndPoint returns the endpoint of the running L2Geth container
func (t *TestcontainerApps) GetL2GethEndPoint() (string, error) {
if t.l2GethContainer == nil || !t.l2GethContainer.IsRunning() {
return "", fmt.Errorf("l2 geth is not running")
}
endpoint, err := t.l2GethContainer.PortEndpoint(context.Background(), "8546/tcp", "ws")
if err != nil {
return "", err
}
return endpoint, nil
}
// GetGormDBClient returns a gorm.DB by connecting to the running postgres container
func (t *TestcontainerApps) GetGormDBClient() (*gorm.DB, error) {
endpoint, err := t.GetDBEndPoint()
if err != nil {
return nil, err
}
dbCfg := &database.Config{
DSN: endpoint,
DriverName: "postgres",
MaxOpenNum: 200,
MaxIdleNum: 20,
}
return database.InitDB(dbCfg)
}
// GetL1GethClient returns an ethclient by dialing the running L1Geth container
func (t *TestcontainerApps) GetL1GethClient() (*ethclient.Client, error) {
endpoint, err := t.GetL1GethEndPoint()
if err != nil {
return nil, err
}
client, err := ethclient.Dial(endpoint)
if err != nil {
return nil, err
}
return client, nil
}
// GetL2GethClient returns an ethclient by dialing the running L2Geth container
func (t *TestcontainerApps) GetL2GethClient() (*ethclient.Client, error) {
endpoint, err := t.GetL2GethEndPoint()
if err != nil {
return nil, err
}
client, err := ethclient.Dial(endpoint)
if err != nil {
return nil, err
}
return client, nil
}
// Free stops all running containers
func (t *TestcontainerApps) Free() {
ctx := context.Background()
if t.postgresContainer != nil && t.postgresContainer.IsRunning() {
if err := t.postgresContainer.Terminate(ctx); err != nil {
log.Printf("failed to stop postgres container: %s", err)
}
}
if t.l1GethContainer != nil && t.l1GethContainer.IsRunning() {
if err := t.l1GethContainer.Terminate(ctx); err != nil {
log.Printf("failed to stop scroll_l1geth container: %s", err)
}
}
if t.l2GethContainer != nil && t.l2GethContainer.IsRunning() {
if err := t.l2GethContainer.Terminate(ctx); err != nil {
log.Printf("failed to stop scroll_l2geth container: %s", err)
}
}
}

View File

@@ -0,0 +1,59 @@
package testcontainers
import (
"testing"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/stretchr/testify/assert"
"gorm.io/gorm"
)
// TestNewTestcontainerApps tests NewTestcontainerApps
func TestNewTestcontainerApps(t *testing.T) {
var (
err error
endpoint string
gormDBclient *gorm.DB
ethclient *ethclient.Client
)
// test start testcontainers
testApps := NewTestcontainerApps()
assert.NoError(t, testApps.StartPostgresContainer())
endpoint, err = testApps.GetDBEndPoint()
assert.NoError(t, err)
assert.NotEmpty(t, endpoint)
gormDBclient, err = testApps.GetGormDBClient()
assert.NoError(t, err)
assert.NotNil(t, gormDBclient)
assert.NoError(t, testApps.StartL1GethContainer())
endpoint, err = testApps.GetL1GethEndPoint()
assert.NoError(t, err)
assert.NotEmpty(t, endpoint)
ethclient, err = testApps.GetL1GethClient()
assert.NoError(t, err)
assert.NotNil(t, ethclient)
assert.NoError(t, testApps.StartL2GethContainer())
endpoint, err = testApps.GetL2GethEndPoint()
assert.NoError(t, err)
assert.NotEmpty(t, endpoint)
ethclient, err = testApps.GetL2GethClient()
assert.NoError(t, err)
assert.NotNil(t, ethclient)
// test free testcontainers
testApps.Free()
endpoint, err = testApps.GetDBEndPoint()
assert.EqualError(t, err, "postgres is not running")
assert.Empty(t, endpoint)
endpoint, err = testApps.GetL1GethEndPoint()
assert.EqualError(t, err, "l1 geth is not running")
assert.Empty(t, endpoint)
endpoint, err = testApps.GetL2GethEndPoint()
assert.EqualError(t, err, "l2 geth is not running")
assert.Empty(t, endpoint)
}

View File

@@ -442,9 +442,9 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) {
}
// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately.
func EstimateBatchL1CommitCalldataSize(c *encoding.Batch) (uint64, error) {
func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) {
var totalL1CommitCalldataSize uint64
for _, chunk := range c.Chunks {
for _, chunk := range b.Chunks {
chunkL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk)
if err != nil {
return 0, err

View File

@@ -210,6 +210,10 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
return nil, fmt.Errorf("too many chunks in batch")
}
if len(batch.Chunks) == 0 {
return nil, fmt.Errorf("too few chunks in batch")
}
// batch data hash
dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
if err != nil {
@@ -223,18 +227,11 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
}
// blob payload
blob, z, err := constructBlobPayload(batch.Chunks)
blob, blobVersionedHash, z, err := constructBlobPayload(batch.Chunks)
if err != nil {
return nil, err
}
// blob versioned hash
c, err := kzg4844.BlobToCommitment(*blob)
if err != nil {
return nil, fmt.Errorf("failed to create blob commitment")
}
blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)
daBatch := DABatch{
Version: CodecV1Version,
BatchIndex: batch.Index,
@@ -277,59 +274,58 @@ func computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
}
// constructBlobPayload constructs the 4844 blob payload.
func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, *kzg4844.Point, error) {
func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
metadataLength := 2 + MaxNumChunks*4
// the raw (un-padded) blob payload
blobBytes := make([]byte, metadataLength)
// the number of chunks that contain at least one L2 transaction
numNonEmptyChunks := 0
// challenge digest preimage
// 1 hash for metadata and 1 for each chunk
challengePreimage := make([]byte, (1+MaxNumChunks)*32)
// 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash
challengePreimage := make([]byte, (1+MaxNumChunks+1)*32)
// the challenge point z
var z kzg4844.Point
// the chunk data hash used for calculating the challenge preimage
var chunkDataHash common.Hash
// blob metadata: num_chunks
binary.BigEndian.PutUint16(blobBytes[0:], uint16(len(chunks)))
// encode blob metadata and L2 transactions,
// and simultaneously also build challenge preimage
for chunkID, chunk := range chunks {
currentChunkStartIndex := len(blobBytes)
hasL2Tx := false
for _, block := range chunk.Blocks {
for _, tx := range block.Transactions {
if tx.Type != types.L1MessageTxType {
hasL2Tx = true
// encode L2 txs into blob payload
rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx)
if err != nil {
return nil, nil, err
return nil, common.Hash{}, nil, err
}
blobBytes = append(blobBytes, rlpTxData...)
continue
}
}
}
// blob metadata: chunki_size
chunkSize := len(blobBytes) - currentChunkStartIndex
binary.BigEndian.PutUint32(blobBytes[2+4*chunkID:], uint32(chunkSize))
if hasL2Tx {
numNonEmptyChunks++
if chunkSize := len(blobBytes) - currentChunkStartIndex; chunkSize != 0 {
binary.BigEndian.PutUint32(blobBytes[2+4*chunkID:], uint32(chunkSize))
}
// challenge: compute chunk data hash
hash := crypto.Keccak256Hash(blobBytes[currentChunkStartIndex:])
copy(challengePreimage[32+chunkID*32:], hash[:])
chunkDataHash = crypto.Keccak256Hash(blobBytes[currentChunkStartIndex:])
copy(challengePreimage[32+chunkID*32:], chunkDataHash[:])
}
// blob metadata: num_chunks
binary.BigEndian.PutUint16(blobBytes[0:], uint16(numNonEmptyChunks))
// if we have fewer than MaxNumChunks chunks, the rest
// of the blob metadata is correctly initialized to 0,
// but we need to add padding to the challenge preimage
for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ {
// use the last chunk's data hash as padding
copy(challengePreimage[32+chunkID*32:], chunkDataHash[:])
}
// challenge: compute metadata hash
hash := crypto.Keccak256Hash(blobBytes[0:metadataLength])
@@ -338,15 +334,30 @@ func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, *kzg4844.Poi
// convert raw data to BLSFieldElements
blob, err := makeBlobCanonical(blobBytes)
if err != nil {
return nil, nil, err
return nil, common.Hash{}, nil, err
}
// compute z = challenge_digest % BLS_MODULUS
challengeDigest := crypto.Keccak256Hash(challengePreimage[:])
point := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
copy(z[:], point.Bytes()[0:32])
// compute blob versioned hash
c, err := kzg4844.BlobToCommitment(*blob)
if err != nil {
return nil, common.Hash{}, nil, fmt.Errorf("failed to create blob commitment")
}
blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)
return blob, &z, nil
// challenge: append blob versioned hash
copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:])
// compute z = challenge_digest % BLS_MODULUS
challengeDigest := crypto.Keccak256Hash(challengePreimage)
pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
pointBytes := pointBigInt.Bytes()
// the challenge point z
var z kzg4844.Point
start := 32 - len(pointBytes)
copy(z[start:], pointBytes)
return blob, blobVersionedHash, &z, nil
}
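
Restated compactly (this only summarizes the code above, with H denoting keccak256): the challenge preimage is the metadata hash followed by one hash per chunk, with the last chunk's hash repeated as padding up to MaxNumChunks, and finally the blob versioned hash; the challenge point z is the digest of that preimage reduced modulo the BLS modulus and left-padded to 32 bytes.

```latex
% Challenge point derivation as implemented in constructBlobPayload (H = keccak256).
\begin{aligned}
\mathrm{preimage} &= H(\mathrm{metadata}) \,\|\, H(\mathrm{chunk}_1) \,\|\, \cdots \,\|\, H(\mathrm{chunk}_n)
  \,\|\, \underbrace{H(\mathrm{chunk}_n) \,\|\, \cdots}_{\text{pad to } \mathrm{MaxNumChunks}}
  \,\|\, \mathrm{blobVersionedHash} \\
z &= H(\mathrm{preimage}) \bmod \mathrm{BLS\_MODULUS}
\end{aligned}
```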
// makeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements.
@@ -443,8 +454,55 @@ func (b *DABatch) BlobDataProof() ([]byte, error) {
return BlobDataProofArgs.Pack(values...)
}
// Blob returns the blob of the batch.
func (b *DABatch) Blob() *kzg4844.Blob {
return b.blob
}
// DecodeFromCalldata attempts to decode a DABatch and an array of DAChunks from the provided calldata byte slice.
func DecodeFromCalldata(data []byte) (*DABatch, []*DAChunk, error) {
// TODO: implement this function.
return nil, nil, nil
}
// EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk.
func EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) {
metadataSize := uint64(2 + 4*MaxNumChunks) // over-estimate: adding metadata length
chunkDataSize, err := chunkL1CommitBlobDataSize(c)
if err != nil {
return 0, err
}
paddedSize := ((metadataSize + chunkDataSize + 30) / 31) * 32
return paddedSize, nil
}
// EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch.
func EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) {
metadataSize := uint64(2 + 4*MaxNumChunks)
var batchDataSize uint64
for _, c := range b.Chunks {
chunkDataSize, err := chunkL1CommitBlobDataSize(c)
if err != nil {
return 0, err
}
batchDataSize += chunkDataSize
}
paddedSize := ((metadataSize + batchDataSize + 30) / 31) * 32
return paddedSize, nil
}
func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) {
var dataSize uint64
for _, block := range c.Blocks {
for _, tx := range block.Transactions {
if tx.Type != types.L1MessageTxType {
rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx)
if err != nil {
return 0, err
}
dataSize += uint64(len(rlpTxData))
}
}
}
return dataSize, nil
}
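
The `((x + 30) / 31) * 32` expression in both estimators is integer arithmetic for rounding up to whole BLS field elements: each 32-byte field element carries at most 31 bytes of payload under the canonical blob encoding referenced above, so

```latex
\mathrm{paddedSize} = \left\lceil \frac{\mathrm{metadataSize} + \mathrm{dataSize}}{31} \right\rceil \times 32
```

For example, 62 bytes of metadata plus RLP data occupy ceil(62/31) = 2 field elements, i.e. 64 bytes of blob space.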

File diff suppressed because one or more lines are too long

View File

@@ -6,8 +6,30 @@ import (
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/log"
)
// CodecVersion defines the version of encoder and decoder.
type CodecVersion int
const (
// CodecV0 represents the version 0 of the encoder and decoder.
CodecV0 CodecVersion = iota
// CodecV1 represents the version 1 of the encoder and decoder.
CodecV1
// txTypeTest is a special transaction type used in unit tests.
txTypeTest = 0xff
)
func init() {
// make sure txTypeTest will not interfere with other transaction types
if txTypeTest == types.LegacyTxType || txTypeTest == types.AccessListTxType || txTypeTest == types.DynamicFeeTxType || txTypeTest == types.BlobTxType || txTypeTest == types.L1MessageTxType {
log.Crit("txTypeTest is overlapping with existing transaction types")
}
}
// Block represents an L2 block.
type Block struct {
Header *types.Header
@@ -123,6 +145,10 @@ func ConvertTxDataToRLPEncoding(txData *types.TransactionData) ([]byte, error) {
S: txData.S.ToInt(),
})
case txTypeTest:
// in the tests, we simply use `data` as the RLP-encoded transaction
return data, nil
case types.L1MessageTxType: // L1MessageTxType is not supported
default:
return nil, fmt.Errorf("unsupported tx type: %d", txData.Type)
@@ -209,8 +235,3 @@ func (b *Batch) WithdrawRoot() common.Hash {
lastChunkBlockNum := len(b.Chunks[numChunks-1].Blocks)
return b.Chunks[len(b.Chunks)-1].Blocks[lastChunkBlockNum-1].WithdrawRoot
}
// NumChunks gets the number of chunks of the batch.
func (b *Batch) NumChunks() uint64 {
return uint64(len(b.Chunks))
}

View File

@@ -75,7 +75,6 @@ func TestUtilFunctions(t *testing.T) {
assert.Equal(t, uint64(240000), chunk3.L2GasUsed())
// Test Batch methods
assert.Equal(t, uint64(3), batch.NumChunks())
assert.Equal(t, block6.Header.Root, batch.StateRoot())
assert.Equal(t, block6.WithdrawRoot, batch.WithdrawRoot())
}

View File

@@ -0,0 +1,91 @@
package message
import (
"crypto/ecdsa"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/rlp"
)
// AuthMsg is the first message exchanged from the Prover to the Sequencer.
// It effectively acts as a registration, and makes the Prover identification
// known to the Sequencer.
type AuthMsg struct {
// Message fields
Identity *Identity `json:"message"`
// Prover signature
Signature string `json:"signature"`
}
// Identity contains all the fields to be signed by the prover.
type Identity struct {
// ProverName the prover name
ProverName string `json:"prover_name"`
// ProverVersion the prover version
ProverVersion string `json:"prover_version"`
// Challenge unique challenge generated by manager
Challenge string `json:"challenge"`
// HardForkName the hard fork name
HardForkName string `json:"hard_fork_name"`
}
// SignWithKey signs the auth message with the given private key and stores the resulting signature
func (a *AuthMsg) SignWithKey(priv *ecdsa.PrivateKey) error {
// Hash identity content
hash, err := a.Identity.Hash()
if err != nil {
return err
}
// Sign register message
sig, err := crypto.Sign(hash, priv)
if err != nil {
return err
}
a.Signature = hexutil.Encode(sig)
return nil
}
// Verify verifies the signature of the auth message.
func (a *AuthMsg) Verify() (bool, error) {
hash, err := a.Identity.Hash()
if err != nil {
return false, err
}
sig := common.FromHex(a.Signature)
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return false, err
}
return crypto.VerifySignature(crypto.CompressPubkey(pk), hash, sig[:len(sig)-1]), nil
}
// PublicKey returns the public key recovered from the signature
func (a *AuthMsg) PublicKey() (string, error) {
hash, err := a.Identity.Hash()
if err != nil {
return "", err
}
sig := common.FromHex(a.Signature)
// recover public key
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return "", err
}
return common.Bytes2Hex(crypto.CompressPubkey(pk)), nil
}
// Hash returns the hash of the auth message, which should be the message used
// to construct the Signature.
func (i *Identity) Hash() ([]byte, error) {
byt, err := rlp.EncodeToBytes(i)
if err != nil {
return nil, err
}
hash := crypto.Keccak256Hash(byt)
return hash[:], nil
}
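
A hedged end-to-end sketch of the sign/verify round trip defined above follows; the import path of this package and all field values are assumptions for illustration.

```go
package main

import (
	"fmt"
	"log"

	"github.com/scroll-tech/go-ethereum/crypto"

	"scroll-tech/common/types/message" // assumed import path for the package above
)

func main() {
	priv, err := crypto.GenerateKey()
	if err != nil {
		log.Fatal(err)
	}

	// Field values are placeholders.
	auth := &message.AuthMsg{
		Identity: &message.Identity{
			ProverName:    "example-prover",
			ProverVersion: "v0.0.0",
			Challenge:     "random-challenge-from-coordinator",
			HardForkName:  "example-fork",
		},
	}

	if err := auth.SignWithKey(priv); err != nil {
		log.Fatalf("failed to sign auth message: %v", err)
	}

	ok, err := auth.Verify()
	if err != nil || !ok {
		log.Fatalf("signature verification failed: ok=%v err=%v", ok, err)
	}

	pk, err := auth.PublicKey()
	if err != nil {
		log.Fatalf("failed to recover public key: %v", err)
	}
	fmt.Println("recovered prover public key:", pk)
}
```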

View File

@@ -0,0 +1,89 @@
package message
import (
"crypto/ecdsa"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/rlp"
)
// LegacyAuthMsg is the old auth message exchanged from the Prover to the Sequencer.
// It effectively acts as a registration, and makes the Prover identification
// known to the Sequencer.
type LegacyAuthMsg struct {
// Message fields
Identity *LegacyIdentity `json:"message"`
// Prover signature
Signature string `json:"signature"`
}
// LegacyIdentity contains all the fields to be signed by the prover.
type LegacyIdentity struct {
// ProverName the prover name
ProverName string `json:"prover_name"`
// ProverVersion the prover version
ProverVersion string `json:"prover_version"`
// Challenge unique challenge generated by manager
Challenge string `json:"challenge"`
}
// SignWithKey signs the auth message with the given private key and stores the resulting signature
func (a *LegacyAuthMsg) SignWithKey(priv *ecdsa.PrivateKey) error {
// Hash identity content
hash, err := a.Identity.Hash()
if err != nil {
return err
}
// Sign register message
sig, err := crypto.Sign(hash, priv)
if err != nil {
return err
}
a.Signature = hexutil.Encode(sig)
return nil
}
// Verify verifies the signature of the auth message.
func (a *LegacyAuthMsg) Verify() (bool, error) {
hash, err := a.Identity.Hash()
if err != nil {
return false, err
}
sig := common.FromHex(a.Signature)
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return false, err
}
return crypto.VerifySignature(crypto.CompressPubkey(pk), hash, sig[:len(sig)-1]), nil
}
// PublicKey returns the public key recovered from the signature
func (a *LegacyAuthMsg) PublicKey() (string, error) {
hash, err := a.Identity.Hash()
if err != nil {
return "", err
}
sig := common.FromHex(a.Signature)
// recover public key
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return "", err
}
return common.Bytes2Hex(crypto.CompressPubkey(pk)), nil
}
// Hash returns the hash of the auth message, which should be the message used
// to construct the Signature.
func (i *LegacyIdentity) Hash() ([]byte, error) {
byt, err := rlp.EncodeToBytes(i)
if err != nil {
return nil, err
}
hash := crypto.Keccak256Hash(byt)
return hash[:], nil
}

View File

@@ -58,26 +58,6 @@ const (
ProofTypeBatch
)
// AuthMsg is the first message exchanged from the Prover to the Sequencer.
// It effectively acts as a registration, and makes the Prover identification
// known to the Sequencer.
type AuthMsg struct {
// Message fields
Identity *Identity `json:"message"`
// Prover signature
Signature string `json:"signature"`
}
// Identity contains all the fields to be signed by the prover.
type Identity struct {
// ProverName the prover name
ProverName string `json:"prover_name"`
// ProverVersion the prover version
ProverVersion string `json:"prover_version"`
// Challenge unique challenge generated by manager
Challenge string `json:"challenge"`
}
// GenerateToken generates token
func GenerateToken() (string, error) {
b := make([]byte, 16)
@@ -87,65 +67,6 @@ func GenerateToken() (string, error) {
return hex.EncodeToString(b), nil
}
// SignWithKey auth message with private key and set public key in auth message's Identity
func (a *AuthMsg) SignWithKey(priv *ecdsa.PrivateKey) error {
// Hash identity content
hash, err := a.Identity.Hash()
if err != nil {
return err
}
// Sign register message
sig, err := crypto.Sign(hash, priv)
if err != nil {
return err
}
a.Signature = hexutil.Encode(sig)
return nil
}
// Verify verifies the message of auth.
func (a *AuthMsg) Verify() (bool, error) {
hash, err := a.Identity.Hash()
if err != nil {
return false, err
}
sig := common.FromHex(a.Signature)
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return false, err
}
return crypto.VerifySignature(crypto.CompressPubkey(pk), hash, sig[:len(sig)-1]), nil
}
// PublicKey return public key from signature
func (a *AuthMsg) PublicKey() (string, error) {
hash, err := a.Identity.Hash()
if err != nil {
return "", err
}
sig := common.FromHex(a.Signature)
// recover public key
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return "", err
}
return common.Bytes2Hex(crypto.CompressPubkey(pk)), nil
}
// Hash returns the hash of the auth message, which should be the message used
// to construct the Signature.
func (i *Identity) Hash() ([]byte, error) {
byt, err := rlp.EncodeToBytes(i)
if err != nil {
return nil, err
}
hash := crypto.Keccak256Hash(byt)
return hash[:], nil
}
// ProofMsg is the data structure sent to the coordinator.
type ProofMsg struct {
*ProofDetail `json:"zkProof"`
@@ -259,6 +180,7 @@ type ChunkInfo struct {
WithdrawRoot common.Hash `json:"withdraw_root"`
DataHash common.Hash `json:"data_hash"`
IsPadding bool `json:"is_padding"`
TxBytes []byte `json:"tx_bytes"`
}
// ChunkProof includes the proof info that are required for chunk verification and rollup.

View File

@@ -54,7 +54,7 @@ func TestIdentityHash(t *testing.T) {
hash, err := identity.Hash()
assert.NoError(t, err)
expectedHash := "83f5e0ad023e9c1de639ab07b9b4cb972ec9dbbd2524794c533a420a5b137721"
expectedHash := "9b8b00f5655411ec1d68ba1666261281c5414aedbda932e5b6a9f7f1b114fdf2"
assert.Equal(t, expectedHash, hex.EncodeToString(hash))
}

View File

@@ -3,11 +3,16 @@ package utils
import (
"context"
"crypto/rand"
"encoding/json"
"errors"
"fmt"
"math/big"
"os"
"path/filepath"
"time"
"github.com/modern-go/reflect2"
"github.com/scroll-tech/go-ethereum/core"
)
// TryTimes try run several times until the function return true.
@@ -59,3 +64,17 @@ func RandomURL() string {
id, _ := rand.Int(rand.Reader, big.NewInt(5000-1))
return fmt.Sprintf("localhost:%d", 10000+2000+id.Int64())
}
// ReadGenesis parses and returns the genesis file at the given path
func ReadGenesis(genesisPath string) (*core.Genesis, error) {
file, err := os.Open(filepath.Clean(genesisPath))
if err != nil {
return nil, err
}
genesis := new(core.Genesis)
if err := json.NewDecoder(file).Decode(genesis); err != nil {
return nil, errors.Join(err, file.Close())
}
return genesis, file.Close()
}
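
A short, hedged usage sketch (the file path and the `utils` package qualifier are illustrative):

```go
genesis, err := utils.ReadGenesis("conf/genesis.json") // illustrative path
if err != nil {
	log.Fatalf("failed to read genesis file: %v", err)
}
log.Printf("genesis chain id: %v", genesis.Config.ChainID)
```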

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v4.3.74"
var tag = "v4.3.93"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {

contracts/.nvmrc Normal file
View File

@@ -0,0 +1 @@
v18.15.0

View File

@@ -162,7 +162,7 @@ Initialize the storage of L1ERC1155Gateway.
| Name | Type | Description |
|---|---|---|
| _counterpart | address | The address of L2ERC1155Gateway in L2. |
| _messenger | address | The address of L1ScrollMessenger. |
| _messenger | address | The address of L1ScrollMessenger in L1. |
### messenger
@@ -389,12 +389,12 @@ Emitted when the ERC1155 NFT is batch deposited to gateway on layer 1.
| Name | Type | Description |
|---|---|---|
| _l1Token `indexed` | address | undefined |
| _l2Token `indexed` | address | undefined |
| _from `indexed` | address | undefined |
| _to | address | undefined |
| _tokenIds | uint256[] | undefined |
| _amounts | uint256[] | undefined |
| _l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
| _l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
| _from `indexed` | address | The address of sender on layer 1. |
| _to | address | The address of recipient on layer 2. |
| _tokenIds | uint256[] | The list of token ids of the ERC1155 NFT to deposit on layer 1. |
| _amounts | uint256[] | The list of corresponding number of token to deposit on layer 1. |
### BatchRefundERC1155
@@ -410,10 +410,10 @@ Emitted when some ERC1155 token is refunded.
| Name | Type | Description |
|---|---|---|
| token `indexed` | address | undefined |
| recipient `indexed` | address | undefined |
| tokenIds | uint256[] | undefined |
| amounts | uint256[] | undefined |
| token `indexed` | address | The address of the token in L1. |
| recipient `indexed` | address | The address of receiver in L1. |
| tokenIds | uint256[] | The list of ids of token refunded. |
| amounts | uint256[] | The list of amount of token refunded. |
### DepositERC1155
@@ -429,12 +429,12 @@ Emitted when the ERC1155 NFT is deposited to gateway on layer 1.
| Name | Type | Description |
|---|---|---|
| _l1Token `indexed` | address | undefined |
| _l2Token `indexed` | address | undefined |
| _from `indexed` | address | undefined |
| _to | address | undefined |
| _tokenId | uint256 | undefined |
| _amount | uint256 | undefined |
| _l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
| _l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
| _from `indexed` | address | The address of sender on layer 1. |
| _to | address | The address of recipient on layer 2. |
| _tokenId | uint256 | The token id of the ERC1155 NFT to deposit on layer 1. |
| _amount | uint256 | The number of token to deposit on layer 1. |
### FinalizeBatchWithdrawERC1155
@@ -450,12 +450,12 @@ Emitted when the ERC1155 NFT is batch transferred to recipient on layer 1.
| Name | Type | Description |
|---|---|---|
| _l1Token `indexed` | address | undefined |
| _l2Token `indexed` | address | undefined |
| _from `indexed` | address | undefined |
| _to | address | undefined |
| _tokenIds | uint256[] | undefined |
| _amounts | uint256[] | undefined |
| _l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
| _l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
| _from `indexed` | address | The address of sender on layer 2. |
| _to | address | The address of recipient on layer 1. |
| _tokenIds | uint256[] | The list of token ids of the ERC1155 NFT to withdraw from layer 2. |
| _amounts | uint256[] | The list of corresponding number of token to withdraw from layer 2. |
### FinalizeWithdrawERC1155
@@ -471,12 +471,12 @@ Emitted when the ERC1155 NFT is transferred to recipient on layer 1.
| Name | Type | Description |
|---|---|---|
| _l1Token `indexed` | address | undefined |
| _l2Token `indexed` | address | undefined |
| _from `indexed` | address | undefined |
| _to | address | undefined |
| _tokenId | uint256 | undefined |
| _amount | uint256 | undefined |
| _l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
| _l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
| _from `indexed` | address | The address of sender on layer 2. |
| _to | address | The address of recipient on layer 1. |
| _tokenId | uint256 | The token id of the ERC1155 NFT to withdraw from layer 2. |
| _amount | uint256 | The number of token to withdraw from layer 2. |
### Initialized
@@ -486,7 +486,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -525,10 +525,10 @@ Emitted when some ERC1155 token is refunded.
| Name | Type | Description |
|---|---|---|
| token `indexed` | address | undefined |
| recipient `indexed` | address | undefined |
| tokenId | uint256 | undefined |
| amount | uint256 | undefined |
| token `indexed` | address | The address of the token in L1. |
| recipient `indexed` | address | The address of receiver in L1. |
| tokenId | uint256 | The id of token refunded. |
| amount | uint256 | The amount of token refunded. |
### UpdateTokenMapping

View File

@@ -156,7 +156,7 @@ Initialize the storage of L1ERC721Gateway.
| Name | Type | Description |
|---|---|---|
| _counterpart | address | The address of L2ERC721Gateway in L2. |
| _messenger | address | The address of L1ScrollMessenger. |
| _messenger | address | The address of L1ScrollMessenger in L1. |
### messenger
@@ -334,11 +334,11 @@ Emitted when the ERC721 NFT is batch deposited to gateway on layer 1.
| Name | Type | Description |
|---|---|---|
| _l1Token `indexed` | address | undefined |
| _l2Token `indexed` | address | undefined |
| _from `indexed` | address | undefined |
| _to | address | undefined |
| _tokenIds | uint256[] | undefined |
| _l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
| _l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
| _from `indexed` | address | The address of sender on layer 1. |
| _to | address | The address of recipient on layer 2. |
| _tokenIds | uint256[] | The list of token ids of the ERC721 NFT to deposit on layer 1. |
### BatchRefundERC721
@@ -354,9 +354,9 @@ Emitted when a batch of ERC721 tokens are refunded.
| Name | Type | Description |
|---|---|---|
| token `indexed` | address | undefined |
| recipient `indexed` | address | undefined |
| tokenIds | uint256[] | undefined |
| token `indexed` | address | The address of the token in L1. |
| recipient `indexed` | address | The address of receiver in L1. |
| tokenIds | uint256[] | The list of token ids of the ERC721 NFT refunded. |
### DepositERC721
@@ -372,11 +372,11 @@ Emitted when the ERC721 NFT is deposited to gateway on layer 1.
| Name | Type | Description |
|---|---|---|
| _l1Token `indexed` | address | undefined |
| _l2Token `indexed` | address | undefined |
| _from `indexed` | address | undefined |
| _to | address | undefined |
| _tokenId | uint256 | undefined |
| _l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
| _l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
| _from `indexed` | address | The address of sender on layer 1. |
| _to | address | The address of recipient on layer 2. |
| _tokenId | uint256 | The token id of the ERC721 NFT to deposit on layer 1. |
### FinalizeBatchWithdrawERC721
@@ -392,11 +392,11 @@ Emitted when the ERC721 NFT is batch transferred to recipient on layer 1.
| Name | Type | Description |
|---|---|---|
| _l1Token `indexed` | address | undefined |
| _l2Token `indexed` | address | undefined |
| _from `indexed` | address | undefined |
| _to | address | undefined |
| _tokenIds | uint256[] | undefined |
| _l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
| _l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
| _from `indexed` | address | The address of sender on layer 2. |
| _to | address | The address of recipient on layer 1. |
| _tokenIds | uint256[] | The list of token ids of the ERC721 NFT to withdraw from layer 2. |
### FinalizeWithdrawERC721
@@ -412,11 +412,11 @@ Emitted when the ERC721 NFT is transferred to recipient on layer 1.
| Name | Type | Description |
|---|---|---|
| _l1Token `indexed` | address | undefined |
| _l2Token `indexed` | address | undefined |
| _from `indexed` | address | undefined |
| _to | address | undefined |
| _tokenId | uint256 | undefined |
| _l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
| _l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
| _from `indexed` | address | The address of sender on layer 2. |
| _to | address | The address of recipient on layer 1. |
| _tokenId | uint256 | The token id of the ERC721 NFT to withdraw from layer 2. |
### Initialized
@@ -426,7 +426,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -465,9 +465,9 @@ Emitted when some ERC721 token is refunded.
| Name | Type | Description |
|---|---|---|
| token `indexed` | address | undefined |
| recipient `indexed` | address | undefined |
| tokenId | uint256 | undefined |
| token `indexed` | address | The address of the token in L1. |
| recipient `indexed` | address | The address of receiver in L1. |
| tokenId | uint256 | The id of token refunded. |
### UpdateTokenMapping

View File

@@ -168,7 +168,7 @@ function ethGateway() external view returns (address)
The address of L1ETHGateway.
*This variable is no longer used.*
#### Returns
@@ -286,7 +286,7 @@ function initialize(address _ethGateway, address _defaultERC20Gateway) external
Initialize the storage of L1GatewayRouter.
*The parameters `_ethGateway` is no longer used.*
#### Parameters
@@ -295,23 +295,6 @@ Initialize the storage of L1GatewayRouter.
| _ethGateway | address | The address of L1ETHGateway contract. |
| _defaultERC20Gateway | address | The address of default ERC20 Gateway contract. |
### messenger
```solidity
function messenger() external view returns (address)
```
The address of `L1ScrollMessenger`.
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | address | undefined |
### owner
```solidity
@@ -447,12 +430,12 @@ Emitted when someone deposit ERC20 token from L1 to L2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L1. |
| to | address | The address of recipient in L2. |
| amount | uint256 | The amount of token will be deposited from L1 to L2. |
| data | bytes | The optional calldata passed to recipient in L2. |
### DepositETH
@@ -468,10 +451,10 @@ Emitted when someone deposit ETH from L1 to L2.
| Name | Type | Description |
|---|---|---|
| from `indexed` | address | undefined |
| to `indexed` | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| from `indexed` | address | The address of sender in L1. |
| to `indexed` | address | The address of recipient in L2. |
| amount | uint256 | The amount of ETH will be deposited from L1 to L2. |
| data | bytes | The optional calldata passed to recipient in L2. |
### FinalizeWithdrawERC20
@@ -487,12 +470,12 @@ Emitted when ERC20 token is withdrawn from L2 to L1 and transfer to recipient.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L2. |
| to | address | The address of recipient in L1. |
| amount | uint256 | The amount of token withdrawn from L2 to L1. |
| data | bytes | The optional calldata passed to recipient in L1. |
### FinalizeWithdrawETH
@@ -508,10 +491,10 @@ Emitted when ETH is withdrawn from L2 to L1 and transfer to recipient.
| Name | Type | Description |
|---|---|---|
| from `indexed` | address | undefined |
| to `indexed` | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| from `indexed` | address | The address of sender in L2. |
| to `indexed` | address | The address of recipient in L1. |
| amount | uint256 | The amount of ETH withdrawn from L2 to L1. |
| data | bytes | The optional calldata passed to recipient in L1. |
### Initialized
@@ -521,7 +504,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -560,9 +543,9 @@ Emitted when some ERC20 token is refunded.
| Name | Type | Description |
|---|---|---|
| token `indexed` | address | undefined |
| recipient `indexed` | address | undefined |
| amount | uint256 | undefined |
| token `indexed` | address | The address of the token in L1. |
| recipient `indexed` | address | The address of receiver in L1. |
| amount | uint256 | The amount of token refunded to receiver. |
### RefundETH
@@ -578,8 +561,8 @@ Emitted when some ETH is refunded.
| Name | Type | Description |
|---|---|---|
| recipient `indexed` | address | undefined |
| amount | uint256 | undefined |
| recipient `indexed` | address | The address of receiver in L1. |
| amount | uint256 | The amount of ETH refunded to receiver. |
### SetDefaultERC20Gateway
@@ -595,8 +578,8 @@ Emitted when the address of default ERC20 Gateway is updated.
| Name | Type | Description |
|---|---|---|
| oldDefaultERC20Gateway `indexed` | address | undefined |
| newDefaultERC20Gateway `indexed` | address | undefined |
| oldDefaultERC20Gateway `indexed` | address | The address of the old default ERC20 Gateway. |
| newDefaultERC20Gateway `indexed` | address | The address of the new default ERC20 Gateway. |
### SetERC20Gateway
@@ -612,9 +595,9 @@ Emitted when the `gateway` for `token` is updated.
| Name | Type | Description |
|---|---|---|
| token `indexed` | address | undefined |
| oldGateway `indexed` | address | undefined |
| newGateway `indexed` | address | undefined |
| token `indexed` | address | The address of token updated. |
| oldGateway `indexed` | address | The corresponding address of the old gateway. |
| newGateway `indexed` | address | The corresponding address of the new gateway. |
### SetETHGateway
@@ -630,22 +613,8 @@ Emitted when the address of ETH Gateway is updated.
| Name | Type | Description |
|---|---|---|
| oldETHGateway `indexed` | address | undefined |
| newEthGateway `indexed` | address | undefined |
## Errors
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*
| oldETHGateway `indexed` | address | The address of the old ETH Gateway. |
| newEthGateway `indexed` | address | The address of the new ETH Gateway. |

View File

@@ -471,7 +471,7 @@ Emitted when a cross domain message is failed to relay.
| Name | Type | Description |
|---|---|---|
| messageHash `indexed` | bytes32 | undefined |
| messageHash `indexed` | bytes32 | The hash of the message. |
### Initialized
@@ -481,7 +481,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -514,7 +514,7 @@ event Paused(address account)
*Emitted when the pause is triggered by `account`.*
#### Parameters
@@ -536,7 +536,7 @@ Emitted when a cross domain message is relayed successfully.
| Name | Type | Description |
|---|---|---|
| messageHash `indexed` | bytes32 | undefined |
| messageHash `indexed` | bytes32 | The hash of the message. |
### SentMessage
@@ -552,12 +552,12 @@ Emitted when a cross domain message is sent.
| Name | Type | Description |
|---|---|---|
| sender `indexed` | address | undefined |
| target `indexed` | address | undefined |
| value | uint256 | undefined |
| messageNonce | uint256 | undefined |
| gasLimit | uint256 | undefined |
| message | bytes | undefined |
| sender `indexed` | address | The address of the sender who initiates the message. |
| target `indexed` | address | The address of target contract to call. |
| value | uint256 | The amount of value passed to the target contract. |
| messageNonce | uint256 | The nonce of the message. |
| gasLimit | uint256 | The optional gas limit passed to L1 or L2. |
| message | bytes | The calldata passed to the target contract. |
### Unpaused
@@ -567,7 +567,7 @@ event Unpaused(address account)
*Emitted when the pause is lifted by `account`.*
#### Parameters
@@ -589,8 +589,8 @@ Emitted when owner updates fee vault contract.
| Name | Type | Description |
|---|---|---|
| _oldFeeVault | address | undefined |
| _newFeeVault | address | undefined |
| _oldFeeVault | address | The address of old fee vault contract. |
| _newFeeVault | address | The address of new fee vault contract. |
### UpdateMaxReplayTimes
@@ -606,8 +606,8 @@ Emitted when the maximum number of times each message can be replayed is updated
| Name | Type | Description |
|---|---|---|
| oldMaxReplayTimes | uint256 | undefined |
| newMaxReplayTimes | uint256 | undefined |
| oldMaxReplayTimes | uint256 | The old maximum number of times each message can be replayed. |
| newMaxReplayTimes | uint256 | The new maximum number of times each message can be replayed. |

View File

@@ -130,7 +130,7 @@ Return the corresponding l2 token address given l1 token address.
### initialize
```solidity
function initialize(address _counterpart, address _router, address _messenger, address _l2TokenImplementation, address _l2TokenFactory) external nonpayable
function initialize(address _counterpart, address _router, address _messenger, address, address) external nonpayable
```
Initialize the storage of L1StandardERC20Gateway.
@@ -142,10 +142,10 @@ Initialize the storage of L1StandardERC20Gateway.
| Name | Type | Description |
|---|---|---|
| _counterpart | address | The address of L2StandardERC20Gateway in L2. |
| _router | address | The address of L1GatewayRouter. |
| _messenger | address | The address of L1ScrollMessenger. |
| _l2TokenImplementation | address | The address of ScrollStandardERC20 implementation in L2. |
| _l2TokenFactory | address | The address of ScrollStandardERC20Factory contract in L2. |
| _router | address | The address of L1GatewayRouter in L1. |
| _messenger | address | The address of L1ScrollMessenger in L1. |
| _3 | address | undefined |
| _4 | address | undefined |
### l2TokenFactory
@@ -293,12 +293,12 @@ Emitted when someone deposit ERC20 token from L1 to L2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L1. |
| to | address | The address of recipient in L2. |
| amount | uint256 | The amount of token will be deposited from L1 to L2. |
| data | bytes | The optional calldata passed to recipient in L2. |
### FinalizeWithdrawERC20
@@ -314,12 +314,12 @@ Emitted when ERC20 token is withdrawn from L2 to L1 and transfer to recipient.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L2. |
| to | address | The address of recipient in L1. |
| amount | uint256 | The amount of token withdrawn from L2 to L1. |
| data | bytes | The optional calldata passed to recipient in L1. |
### Initialized
@@ -329,7 +329,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -368,9 +368,9 @@ Emitted when some ERC20 token is refunded.
| Name | Type | Description |
|---|---|---|
| token `indexed` | address | undefined |
| recipient `indexed` | address | undefined |
| amount | uint256 | undefined |
| token `indexed` | address | The address of the token in L1. |
| recipient `indexed` | address | The address of receiver in L1. |
| amount | uint256 | The amount of token refunded to receiver. |

View File

@@ -152,15 +152,15 @@ function initialize(address _counterpart, address _router, address _messenger) e
Initialize the storage of L1WETHGateway.
*The parameters `_counterpart`, `_router` and `_messenger` are no longer used.*
#### Parameters
| Name | Type | Description |
|---|---|---|
| _counterpart | address | The address of L2ETHGateway in L2. |
| _router | address | The address of L1GatewayRouter. |
| _messenger | address | The address of L1ScrollMessenger. |
| _router | address | The address of L1GatewayRouter in L1. |
| _messenger | address | The address of L1ScrollMessenger in L1. |
### l2WETH
@@ -291,12 +291,12 @@ Emitted when someone deposit ERC20 token from L1 to L2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L1. |
| to | address | The address of recipient in L2. |
| amount | uint256 | The amount of token will be deposited from L1 to L2. |
| data | bytes | The optional calldata passed to recipient in L2. |
### FinalizeWithdrawERC20
@@ -312,12 +312,12 @@ Emitted when ERC20 token is withdrawn from L2 to L1 and transfer to recipient.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L2. |
| to | address | The address of recipient in L1. |
| amount | uint256 | The amount of token withdrawn from L2 to L1. |
| data | bytes | The optional calldata passed to recipient in L1. |
### Initialized
@@ -327,7 +327,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -366,9 +366,9 @@ Emitted when some ERC20 token is refunded.
| Name | Type | Description |
|---|---|---|
| token `indexed` | address | undefined |
| recipient `indexed` | address | undefined |
| amount | uint256 | undefined |
| token `indexed` | address | The address of the token in L1. |
| recipient `indexed` | address | The address of receiver in L1. |
| amount | uint256 | The amount of token refunded to receiver. |

View File

@@ -373,12 +373,12 @@ Emitted when the ERC1155 NFT is batch transferred to gateway on layer 2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| tokenIds | uint256[] | undefined |
| amounts | uint256[] | undefined |
| l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
| l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
| from `indexed` | address | The address of sender on layer 2. |
| to | address | The address of recipient on layer 1. |
| tokenIds | uint256[] | The list of token ids of the ERC1155 NFT to withdraw on layer 2. |
| amounts | uint256[] | The list of corresponding amounts to withdraw. |
### FinalizeBatchDepositERC1155
@@ -394,12 +394,12 @@ Emitted when the ERC1155 NFT is batch transferred to recipient on layer 2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| tokenIds | uint256[] | undefined |
| amounts | uint256[] | undefined |
| l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
| l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
| from `indexed` | address | The address of sender on layer 1. |
| to | address | The address of recipient on layer 2. |
| tokenIds | uint256[] | The list of token ids of the ERC1155 NFT deposited on layer 1. |
| amounts | uint256[] | The list of corresponding amounts deposited. |
### FinalizeDepositERC1155
@@ -415,12 +415,12 @@ Emitted when the ERC1155 NFT is transferred to recipient on layer 2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| tokenId | uint256 | undefined |
| amount | uint256 | undefined |
| l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
| l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
| from `indexed` | address | The address of sender on layer 1. |
| to | address | The address of recipient on layer 2. |
| tokenId | uint256 | The token id of the ERC1155 NFT deposited on layer 1. |
| amount | uint256 | The amount of token deposited. |
### Initialized
@@ -430,7 +430,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -487,12 +487,12 @@ Emitted when the ERC1155 NFT is transferred to gateway on layer 2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| tokenId | uint256 | undefined |
| amount | uint256 | undefined |
| l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
| l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
| from `indexed` | address | The address of sender on layer 2. |
| to | address | The address of recipient on layer 1. |
| tokenId | uint256 | The token id of the ERC1155 NFT to withdraw on layer 2. |
| amount | uint256 | The amount of token to withdraw. |

View File

@@ -318,11 +318,11 @@ Emitted when the ERC721 NFT is batch transferred to gateway on layer 2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| tokenIds | uint256[] | undefined |
| l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
| l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
| from `indexed` | address | The address of sender on layer 2. |
| to | address | The address of recipient on layer 1. |
| tokenIds | uint256[] | The list of token ids of the ERC721 NFT to withdraw on layer 2. |
### FinalizeBatchDepositERC721
@@ -338,11 +338,11 @@ Emitted when the ERC721 NFT is batch transferred to recipient on layer 2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| tokenIds | uint256[] | undefined |
| l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
| l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
| from `indexed` | address | The address of sender on layer 1. |
| to | address | The address of recipient on layer 2. |
| tokenIds | uint256[] | The list of token ids of the ERC721 NFT deposited on layer 1. |
### FinalizeDepositERC721
@@ -358,11 +358,11 @@ Emitted when the ERC721 NFT is transferred to recipient on layer 2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| tokenId | uint256 | undefined |
| l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
| l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
| from `indexed` | address | The address of sender on layer 1. |
| to | address | The address of recipient on layer 2. |
| tokenId | uint256 | The token id of the ERC721 NFT deposited on layer 1. |
### Initialized
@@ -372,7 +372,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -429,11 +429,11 @@ Emitted when the ERC721 NFT is transferred to gateway on layer 2.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| tokenId | uint256 | undefined |
| l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
| l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
| from `indexed` | address | The address of sender on layer 2. |
| to | address | The address of recipient on layer 1. |
| tokenId | uint256 | The token id of the ERC721 NFT to withdraw on layer 2. |

View File

@@ -189,23 +189,6 @@ function initialize(address _ethGateway, address _defaultERC20Gateway) external
| _ethGateway | address | undefined |
| _defaultERC20Gateway | address | undefined |
### messenger
```solidity
function messenger() external view returns (address)
```
The address of `L2ScrollMessenger`.
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | address | undefined |
### owner
```solidity
@@ -428,12 +411,12 @@ Emitted when ERC20 token is deposited from L1 to L2 and transfer to recipient.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L1. |
| to | address | The address of recipient in L2. |
| amount | uint256 | The amount of token withdrawn from L1 to L2. |
| data | bytes | The optional calldata passed to recipient in L2. |
### FinalizeDepositETH
@@ -449,10 +432,10 @@ Emitted when ETH is deposited from L1 to L2 and transfer to recipient.
| Name | Type | Description |
|---|---|---|
| from `indexed` | address | undefined |
| to `indexed` | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| from `indexed` | address | The address of sender in L1. |
| to `indexed` | address | The address of recipient in L2. |
| amount | uint256 | The amount of ETH deposited from L1 to L2. |
| data | bytes | The optional calldata passed to recipient in L2. |
### Initialized
@@ -462,7 +445,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -501,8 +484,8 @@ Emitted when the address of default ERC20 Gateway is updated.
| Name | Type | Description |
|---|---|---|
| oldDefaultERC20Gateway `indexed` | address | undefined |
| newDefaultERC20Gateway `indexed` | address | undefined |
| oldDefaultERC20Gateway `indexed` | address | The address of the old default ERC20 Gateway. |
| newDefaultERC20Gateway `indexed` | address | The address of the new default ERC20 Gateway. |
### SetERC20Gateway
@@ -518,9 +501,9 @@ Emitted when the `gateway` for `token` is updated.
| Name | Type | Description |
|---|---|---|
| token `indexed` | address | undefined |
| oldGateway `indexed` | address | undefined |
| newGateway `indexed` | address | undefined |
| token `indexed` | address | The address of token updated. |
| oldGateway `indexed` | address | The corresponding address of the old gateway. |
| newGateway `indexed` | address | The corresponding address of the new gateway. |
### SetETHGateway
@@ -536,8 +519,8 @@ Emitted when the address of ETH Gateway is updated.
| Name | Type | Description |
|---|---|---|
| oldETHGateway `indexed` | address | undefined |
| newEthGateway `indexed` | address | undefined |
| oldETHGateway `indexed` | address | The address of the old ETH Gateway. |
| newEthGateway `indexed` | address | The address of the new ETH Gateway. |
### WithdrawERC20
@@ -553,12 +536,12 @@ Emitted when someone withdraw ERC20 token from L2 to L1.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L2. |
| to | address | The address of recipient in L1. |
| amount | uint256 | The amount of token will be deposited from L2 to L1. |
| data | bytes | The optional calldata passed to recipient in L1. |
### WithdrawETH
@@ -574,24 +557,10 @@ Emitted when someone withdraw ETH from L2 to L1.
| Name | Type | Description |
|---|---|---|
| from `indexed` | address | undefined |
| to `indexed` | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
## Errors
### ErrorZeroAddress
```solidity
error ErrorZeroAddress()
```
*Thrown when the given address is `address(0)`.*
| from `indexed` | address | The address of sender in L2. |
| to `indexed` | address | The address of recipient in L1. |
| amount | uint256 | The amount of ETH withdrawn from L2 to L1. |
| data | bytes | The optional calldata passed to recipient in L1. |

View File

@@ -308,7 +308,7 @@ Emitted when a cross domain message is failed to relay.
| Name | Type | Description |
|---|---|---|
| messageHash `indexed` | bytes32 | undefined |
| messageHash `indexed` | bytes32 | The hash of the message. |
### Initialized
@@ -318,7 +318,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -351,7 +351,7 @@ event Paused(address account)
*Emitted when the pause is triggered by `account`.*
#### Parameters
@@ -373,7 +373,7 @@ Emitted when a cross domain message is relayed successfully.
| Name | Type | Description |
|---|---|---|
| messageHash `indexed` | bytes32 | undefined |
| messageHash `indexed` | bytes32 | The hash of the message. |
### SentMessage
@@ -389,12 +389,12 @@ Emitted when a cross domain message is sent.
| Name | Type | Description |
|---|---|---|
| sender `indexed` | address | undefined |
| target `indexed` | address | undefined |
| value | uint256 | undefined |
| messageNonce | uint256 | undefined |
| gasLimit | uint256 | undefined |
| message | bytes | undefined |
| sender `indexed` | address | The address of the sender who initiates the message. |
| target `indexed` | address | The address of the target contract to call. |
| value | uint256 | The amount of value passed to the target contract. |
| messageNonce | uint256 | The nonce of the message. |
| gasLimit | uint256 | The optional gas limit passed to L1 or L2. |
| message | bytes | The calldata passed to the target contract. |
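
For reference, a minimal ethers v6 sketch of subscribing to `SentMessage` off-chain. The event signature is reconstructed from the parameter table above; the RPC endpoint and messenger address are placeholders, not values from this changeset.

```typescript
import { Contract, JsonRpcProvider } from "ethers";

// Assumptions: any RPC endpoint plus the deployed messenger address (placeholder below).
const provider = new JsonRpcProvider("https://rpc.scroll.io");
const messenger = new Contract(
  "0x0000000000000000000000000000000000000000", // placeholder messenger address
  [
    "event SentMessage(address indexed sender, address indexed target, uint256 value, uint256 messageNonce, uint256 gasLimit, bytes message)",
  ],
  provider
);

// Log each cross-domain message as it is emitted.
messenger.on("SentMessage", (sender, target, value, messageNonce, gasLimit, message) => {
  console.log(`message #${messageNonce}: ${sender} -> ${target}, value=${value}, gasLimit=${gasLimit}`);
});
```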
### Unpaused
@@ -404,7 +404,7 @@ event Unpaused(address account)
*Emitted when the pause is lifted by `account`.*
#### Parameters
@@ -426,8 +426,8 @@ Emitted when owner updates fee vault contract.
| Name | Type | Description |
|---|---|---|
| _oldFeeVault | address | undefined |
| _newFeeVault | address | undefined |
| _oldFeeVault | address | The address of the old fee vault contract. |
| _newFeeVault | address | The address of the new fee vault contract. |
### UpdateMaxFailedExecutionTimes
@@ -443,8 +443,8 @@ Emitted when the maximum number of times each message can fail in L2 is updated.
| Name | Type | Description |
|---|---|---|
| oldMaxFailedExecutionTimes | uint256 | undefined |
| newMaxFailedExecutionTimes | uint256 | undefined |
| oldMaxFailedExecutionTimes | uint256 | The old maximum number of times each message can fail in L2. |
| newMaxFailedExecutionTimes | uint256 | The new maximum number of times each message can fail in L2. |

View File

@@ -95,7 +95,7 @@ Return the corresponding l2 token address given l1 token address.
### initialize
```solidity
function initialize(address _counterpart, address _router, address _messenger, address _tokenFactory) external nonpayable
function initialize(address _counterpart, address _router, address _messenger, address) external nonpayable
```
Initialize the storage of L2StandardERC20Gateway.
@@ -106,10 +106,10 @@ Initialize the storage of L2StandardERC20Gateway.
| Name | Type | Description |
|---|---|---|
| _counterpart | address | The address of L1ETHGateway in L1. |
| _router | address | The address of L2GatewayRouter. |
| _messenger | address | The address of L2ScrollMessenger. |
| _tokenFactory | address | The address of ScrollStandardERC20Factory. |
| _counterpart | address | The address of `L1StandardERC20Gateway` contract in L1. |
| _router | address | The address of `L2GatewayRouter` contract in L2. |
| _messenger | address | The address of `L2ScrollMessenger` contract in L2. |
| _3 | address | undefined |
### messenger
@@ -281,12 +281,12 @@ Emitted when ERC20 token is deposited from L1 to L2 and transfer to recipient.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L1. |
| to | address | The address of recipient in L2. |
| amount | uint256 | The amount of token deposited from L1 to L2. |
| data | bytes | The optional calldata passed to recipient in L2. |
### Initialized
@@ -296,7 +296,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -335,12 +335,12 @@ Emitted when someone withdraw ERC20 token from L2 to L1.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L2. |
| to | address | The address of recipient in L1. |
| amount | uint256 | The amount of token withdrawn from L2 to L1. |
| data | bytes | The optional calldata passed to recipient in L1. |

View File

@@ -297,12 +297,12 @@ Emitted when ERC20 token is deposited from L1 to L2 and transfer to recipient.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L1. |
| to | address | The address of recipient in L2. |
| amount | uint256 | The amount of token deposited from L1 to L2. |
| data | bytes | The optional calldata passed to recipient in L2. |
### Initialized
@@ -312,7 +312,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -351,12 +351,12 @@ Emitted when someone withdraw ERC20 token from L2 to L1.
| Name | Type | Description |
|---|---|---|
| l1Token `indexed` | address | undefined |
| l2Token `indexed` | address | undefined |
| from `indexed` | address | undefined |
| to | address | undefined |
| amount | uint256 | undefined |
| data | bytes | undefined |
| l1Token `indexed` | address | The address of the token in L1. |
| l2Token `indexed` | address | The address of the token in L2. |
| from `indexed` | address | The address of sender in L2. |
| to | address | The address of recipient in L1. |
| amount | uint256 | The amount of token withdrawn from L2 to L1. |
| data | bytes | The optional calldata passed to recipient in L1. |

View File

@@ -91,7 +91,7 @@ function finalizeBatchWithProof(bytes _batchHeader, bytes32 _prevStateRoot, byte
Finalize a committed batch on layer 1.
*We keep this function so the upgrade to 4844 goes more smoothly.*
#### Parameters
@@ -103,6 +103,27 @@ Finalize a committed batch on layer 1.
| _withdrawRoot | bytes32 | undefined |
| _aggrProof | bytes | undefined |
### finalizeBatchWithProof4844
```solidity
function finalizeBatchWithProof4844(bytes _batchHeader, bytes32 _prevStateRoot, bytes32 _postStateRoot, bytes32 _withdrawRoot, bytes _blobDataProof, bytes _aggrProof) external nonpayable
```
Finalize a committed batch (with blob) on layer 1.
*Memory layout of `_blobDataProof`: ```text | z | y | kzg_commitment | kzg_proof | |---------|---------|----------------|-----------| | bytes32 | bytes32 | bytes48 | bytes48 | ```*
#### Parameters
| Name | Type | Description |
|---|---|---|
| _batchHeader | bytes | undefined |
| _prevStateRoot | bytes32 | undefined |
| _postStateRoot | bytes32 | undefined |
| _withdrawRoot | bytes32 | undefined |
| _blobDataProof | bytes | undefined |
| _aggrProof | bytes | undefined |
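
As an illustration of the `_blobDataProof` memory layout documented above, a minimal ethers v6 sketch of assembling the 160-byte buffer and ABI-encoding a call to `finalizeBatchWithProof4844`. All input values are placeholders; in practice they come from the prover/sequencer pipeline.

```typescript
import { Interface, concat, zeroPadValue } from "ethers";

// Placeholder inputs (not real proof data).
const z = zeroPadValue("0x01", 32);           // bytes32 evaluation point
const y = zeroPadValue("0x02", 32);           // bytes32 claimed evaluation
const kzgCommitment = "0x" + "11".repeat(48); // bytes48
const kzgProof = "0x" + "22".repeat(48);      // bytes48

// | z | y | kzg_commitment | kzg_proof | -> 32 + 32 + 48 + 48 = 160 bytes
const blobDataProof = concat([z, y, kzgCommitment, kzgProof]);

const iface = new Interface([
  "function finalizeBatchWithProof4844(bytes _batchHeader, bytes32 _prevStateRoot, bytes32 _postStateRoot, bytes32 _withdrawRoot, bytes _blobDataProof, bytes _aggrProof)",
]);
const calldata = iface.encodeFunctionData("finalizeBatchWithProof4844", [
  "0x",                   // _batchHeader (placeholder)
  "0x" + "00".repeat(32), // _prevStateRoot (placeholder)
  "0x" + "00".repeat(32), // _postStateRoot (placeholder)
  "0x" + "00".repeat(32), // _withdrawRoot (placeholder)
  blobDataProof,
  "0x",                   // _aggrProof (placeholder)
]);
console.log(calldata);
```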
### finalizedStateRoots
```solidity
@@ -493,8 +514,8 @@ Emitted when a new batch is committed.
| Name | Type | Description |
|---|---|---|
| batchIndex `indexed` | uint256 | undefined |
| batchHash `indexed` | bytes32 | undefined |
| batchIndex `indexed` | uint256 | The index of the batch. |
| batchHash `indexed` | bytes32 | The hash of the batch. |
### FinalizeBatch
@@ -510,10 +531,10 @@ Emitted when a batch is finalized.
| Name | Type | Description |
|---|---|---|
| batchIndex `indexed` | uint256 | undefined |
| batchHash `indexed` | bytes32 | undefined |
| stateRoot | bytes32 | undefined |
| withdrawRoot | bytes32 | undefined |
| batchIndex `indexed` | uint256 | The index of the batch. |
| batchHash `indexed` | bytes32 | The hash of the batch. |
| stateRoot | bytes32 | The state root on layer 2 after this batch. |
| withdrawRoot | bytes32 | The withdraw merkle root on layer 2 after this batch. |
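
A short ethers v6 sketch of querying recent `FinalizeBatch` events. The event signature is reconstructed from the table above; the L1 RPC endpoint is an assumption, and the ScrollChain proxy address is the one used by the fork tests later in this changeset.

```typescript
import { Contract, EventLog, JsonRpcProvider } from "ethers";

async function latestFinalizedBatches(): Promise<void> {
  const provider = new JsonRpcProvider("https://rpc.ankr.com/eth"); // assumed L1 RPC
  const scrollChain = new Contract(
    "0xa13BAF47339d63B743e7Da8741db5456DAc1E556", // ScrollChain proxy, as in the fork tests
    [
      "event FinalizeBatch(uint256 indexed batchIndex, bytes32 indexed batchHash, bytes32 stateRoot, bytes32 withdrawRoot)",
    ],
    provider
  );
  const latest = await provider.getBlockNumber();
  const events = await scrollChain.queryFilter(scrollChain.filters.FinalizeBatch(), latest - 5000, latest);
  for (const ev of events) {
    if (ev instanceof EventLog) {
      console.log(ev.args.batchIndex, ev.args.stateRoot);
    }
  }
}
```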
### Initialized
@@ -523,7 +544,7 @@ event Initialized(uint8 version)
*Triggered when the contract has been initialized or reinitialized.*
#### Parameters
@@ -556,7 +577,7 @@ event Paused(address account)
*Emitted when the pause is triggered by `account`.*
#### Parameters
@@ -578,8 +599,8 @@ revert a pending batch.
| Name | Type | Description |
|---|---|---|
| batchIndex `indexed` | uint256 | undefined |
| batchHash `indexed` | bytes32 | undefined |
| batchIndex `indexed` | uint256 | The index of the batch. |
| batchHash `indexed` | bytes32 | The hash of the batch. |
### Unpaused
@@ -589,7 +610,7 @@ event Unpaused(address account)
*Emitted when the pause is lifted by `account`.*
#### Parameters
@@ -652,6 +673,347 @@ Emitted when owner updates the status of sequencer.
## Errors
### ErrorAccountIsNotEOA
```solidity
error ErrorAccountIsNotEOA()
```
*Thrown when the given account is not an EOA account.*
### ErrorBatchHeaderLengthTooSmall
```solidity
error ErrorBatchHeaderLengthTooSmall()
```
*Thrown when the length of the batch header is smaller than 89.*
### ErrorBatchIsAlreadyCommitted
```solidity
error ErrorBatchIsAlreadyCommitted()
```
*Thrown when committing a committed batch.*
### ErrorBatchIsAlreadyVerified
```solidity
error ErrorBatchIsAlreadyVerified()
```
*Thrown when finalizing a verified batch.*
### ErrorBatchIsEmpty
```solidity
error ErrorBatchIsEmpty()
```
*Thrown when committing an empty batch (a batch without chunks).*
### ErrorCallPointEvaluationPrecompileFailed
```solidity
error ErrorCallPointEvaluationPrecompileFailed()
```
*Thrown when the call to the point evaluation precompile failed.*
### ErrorCallerIsNotProver
```solidity
error ErrorCallerIsNotProver()
```
*Thrown when the caller is not the prover.*
### ErrorCallerIsNotSequencer
```solidity
error ErrorCallerIsNotSequencer()
```
*Thrown when the caller is not the sequencer.*
### ErrorFoundMultipleBlob
```solidity
error ErrorFoundMultipleBlob()
```
*Thrown when the transaction has multiple blobs.*
### ErrorGenesisBatchHasNonZeroField
```solidity
error ErrorGenesisBatchHasNonZeroField()
```
*Thrown when some fields are not zero in the genesis batch.*
### ErrorGenesisBatchImported
```solidity
error ErrorGenesisBatchImported()
```
*Thrown when importing the genesis batch twice.*
### ErrorGenesisDataHashIsZero
```solidity
error ErrorGenesisDataHashIsZero()
```
*Thrown when the data hash in the genesis batch is zero.*
### ErrorGenesisParentBatchHashIsNonZero
```solidity
error ErrorGenesisParentBatchHashIsNonZero()
```
*Thrown when the parent batch hash in the genesis batch is nonzero.*
### ErrorIncompleteL2TransactionData
```solidity
error ErrorIncompleteL2TransactionData()
```
*Thrown when the l2 transaction is incomplete.*
### ErrorIncorrectBatchHash
```solidity
error ErrorIncorrectBatchHash()
```
*Thrown when the batch hash is incorrect.*
### ErrorIncorrectBatchIndex
```solidity
error ErrorIncorrectBatchIndex()
```
*Thrown when the batch index is incorrect.*
### ErrorIncorrectBitmapLength
```solidity
error ErrorIncorrectBitmapLength()
```
*Thrown when the bitmap length is incorrect.*
### ErrorIncorrectChunkLength
```solidity
error ErrorIncorrectChunkLength()
```
*Thrown when the length of the chunk is incorrect.*
### ErrorIncorrectPreviousStateRoot
```solidity
error ErrorIncorrectPreviousStateRoot()
```
*Thrown when the previous state root doesn't match the stored one.*
### ErrorInvalidBatchHeaderVersion
```solidity
error ErrorInvalidBatchHeaderVersion()
```
*Thrown when the batch header version is invalid.*
### ErrorLastL1MessageSkipped
```solidity
error ErrorLastL1MessageSkipped()
```
*Thrown when the last message is skipped.*
### ErrorNoBlobFound
```solidity
error ErrorNoBlobFound()
```
*Thrown when no blob is found in the transaction.*
### ErrorNoBlockInChunk
```solidity
error ErrorNoBlockInChunk()
```
*Thrown when there are no blocks in a chunk.*
### ErrorNumTxsLessThanNumL1Msgs
```solidity
error ErrorNumTxsLessThanNumL1Msgs()
```
*Thrown when the number of transactions is less than the number of L1 messages in one block.*
### ErrorPreviousStateRootIsZero
```solidity
error ErrorPreviousStateRootIsZero()
```
*Thrown when the given previous state root is zero.*
### ErrorRevertFinalizedBatch
```solidity
error ErrorRevertFinalizedBatch()
```
*Thrown when reverting a finalized batch.*
### ErrorRevertNotStartFromEnd
```solidity
error ErrorRevertNotStartFromEnd()
```
*Thrown when the reverted batches are not at the end of the committed batch chain.*
### ErrorRevertZeroBatches
```solidity
error ErrorRevertZeroBatches()
```
*Thrown when the number of batches to revert is zero.*
### ErrorStateRootIsZero
```solidity
error ErrorStateRootIsZero()
```
*Thrown when the given state root is zero.*
### ErrorTooManyTxsInOneChunk
```solidity
error ErrorTooManyTxsInOneChunk()
```
*Thrown when a chunk contains too many transactions.*
### ErrorUnexpectedPointEvaluationPrecompileOutput
```solidity
error ErrorUnexpectedPointEvaluationPrecompileOutput()
```
*Thrown when the precompile output is incorrect.*
### ErrorZeroAddress
```solidity

View File

@@ -135,8 +135,8 @@ Emitted when a l2 token is deployed.
| Name | Type | Description |
|---|---|---|
| _l1Token `indexed` | address | undefined |
| _l2Token `indexed` | address | undefined |
| _l1Token `indexed` | address | The address of the l1 token. |
| _l2Token `indexed` | address | The address of the l2 token. |
### OwnershipTransferred

View File

@@ -8,8 +8,8 @@ remappings = [] # a list of remapp
libraries = [] # a list of deployed libraries to link against
cache = true # whether to cache builds or not
force = true # whether to ignore the cache (clean build)
evm_version = 'london' # the evm version (by hardfork name)
solc_version = '0.8.16' # override for the solc version (setting this ignores `auto_detect_solc`)
# evm_version = 'london' # the evm version (by hardfork name)
solc_version = '0.8.24' # override for the solc version (setting this ignores `auto_detect_solc`)
optimizer = true # enable or disable the solc optimizer
optimizer_runs = 200 # the number of optimizer runs
verbosity = 2 # the verbosity of tests

View File

@@ -2,8 +2,9 @@ import * as dotenv from "dotenv";
import { HardhatUserConfig, subtask } from "hardhat/config";
import * as toml from "toml";
import "@nomiclabs/hardhat-etherscan";
import "@nomiclabs/hardhat-waffle";
import "@nomicfoundation/hardhat-verify";
import "@nomicfoundation/hardhat-ethers";
import "@nomicfoundation/hardhat-chai-matchers";
import "@typechain/hardhat";
import "@primitivefi/hardhat-dodoc";
import "hardhat-gas-reporter";
@@ -13,16 +14,10 @@ import { TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS } from "hardhat/builtin-tasks/ta
dotenv.config();
// default values here to avoid failures when running hardhat
const RINKEBY_RPC = process.env.RINKEBY_RPC || "1".repeat(32);
const SCROLL_L1_RPC = process.env.SCROLL_L1_RPC || "1".repeat(32);
const SCROLL_L2_RPC = process.env.SCROLL_L2_RPC || "1".repeat(32);
const RINKEBY_PRIVATE_KEY = process.env.RINKEBY_PRIVATE_KEY || "1".repeat(64);
const L1_DEPLOYER_PRIVATE_KEY = process.env.L1_DEPLOYER_PRIVATE_KEY || "1".repeat(64);
const L2_DEPLOYER_PRIVATE_KEY = process.env.L2_DEPLOYER_PRIVATE_KEY || "1".repeat(64);
const SOLC_DEFAULT = "0.8.16";
const SOLC_DEFAULT = "0.8.24";
// try use forge config
let foundry: any;
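
For context, the `foundry` object referenced below is populated by parsing `foundry.toml`; a minimal sketch of that lookup, assuming the `toml` package imported above and a `foundry.toml` next to this config (matching the `solc_version` key shown earlier):

```typescript
import * as fs from "fs";
import * as toml from "toml";

// Assumption: foundry.toml sits at the repository root, next to this hardhat config.
const foundryConfig = toml.parse(fs.readFileSync("./foundry.toml", "utf-8"));
// After the rename above, the compiler version is read from `solc_version`.
console.log(foundryConfig.default?.solc_version);   // "0.8.24"
console.log(foundryConfig.default?.optimizer_runs); // 200
```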
@@ -45,29 +40,30 @@ subtask(TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS).setAction(async (_, __, runSuper
const config: HardhatUserConfig = {
solidity: {
version: foundry.default?.solc || SOLC_DEFAULT,
version: foundry.default?.solc_version || SOLC_DEFAULT,
settings: {
optimizer: {
enabled: foundry.default?.optimizer || true,
runs: foundry.default?.optimizer_runs || 200,
},
evmVersion: "cancun",
},
},
networks: {
rinkeby: {
url: RINKEBY_RPC,
accounts: [RINKEBY_PRIVATE_KEY],
},
l1geth: {
url: SCROLL_L1_RPC,
gasPrice: 20000000000,
gasMultiplier: 1.1,
ethereum: {
url: "https://1rpc.io/eth",
accounts: [L1_DEPLOYER_PRIVATE_KEY],
},
l2geth: {
url: SCROLL_L2_RPC,
gasPrice: 20000000000,
gasMultiplier: 1.1,
sepolia: {
url: "https://1rpc.io/sepolia",
accounts: [L1_DEPLOYER_PRIVATE_KEY],
},
scroll: {
url: "https://rpc.scroll.io",
accounts: [L2_DEPLOYER_PRIVATE_KEY],
},
scroll_sepolia: {
url: "https://sepolia-rpc.scroll.io",
accounts: [L2_DEPLOYER_PRIVATE_KEY],
},
},
@@ -76,13 +72,40 @@ const config: HardhatUserConfig = {
sources: "./src",
tests: "./integration-test",
},
typechain: {
outDir: "./typechain",
target: "ethers-v6",
},
gasReporter: {
enabled: process.env.REPORT_GAS !== undefined,
excludeContracts: ["src/test"],
currency: "USD",
},
etherscan: {
apiKey: process.env.ETHERSCAN_API_KEY,
apiKey: {
ethereum: process.env.ETHERSCAN_API_KEY || "",
sepolia: process.env.ETHERSCAN_API_KEY || "",
scroll: process.env.SCROLLSCAN_API_KEY || "",
scroll_sepolia: process.env.SCROLLSCAN_API_KEY || "",
},
customChains: [
{
network: "scroll",
chainId: 534352,
urls: {
apiURL: "https://api.scrollscan.com/api",
browserURL: "https://www.scrollscan.com/",
},
},
{
network: "scroll_sepolia",
chainId: 534351,
urls: {
apiURL: "https://api-sepolia.scrollscan.com/api",
browserURL: "https://sepolia.scrollscan.com/",
},
},
],
},
mocha: {
timeout: 10000000,

View File

@@ -1,16 +1,16 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { HardhatEthersSigner, SignerWithAddress } from "@nomicfoundation/hardhat-ethers/signers";
import { expect } from "chai";
import { BigNumberish, BytesLike, constants } from "ethers";
import { BigNumberish, BytesLike, MaxUint256, ZeroAddress, getBytes } from "ethers";
import { ethers } from "hardhat";
import { EnforcedTxGateway, L1MessageQueue, L2GasPriceOracle, MockCaller } from "../typechain";
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
import { arrayify } from "ethers/lib/utils";
describe("EnforcedTxGateway.spec", async () => {
let deployer: SignerWithAddress;
let feeVault: SignerWithAddress;
let signer: SignerWithAddress;
let deployer: HardhatEthersSigner;
let feeVault: HardhatEthersSigner;
let signer: HardhatEthersSigner;
let caller: MockCaller;
let gateway: EnforcedTxGateway;
@@ -21,10 +21,8 @@ describe("EnforcedTxGateway.spec", async () => {
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
const Factory = await ethers.getContractFactory(name, deployer);
const impl = args.length > 0 ? await Factory.deploy(...args) : await Factory.deploy();
await impl.deployed();
const proxy = await TransparentUpgradeableProxy.deploy(impl.address, admin, "0x");
await proxy.deployed();
return proxy.address;
const proxy = await TransparentUpgradeableProxy.deploy(impl.getAddress(), admin, "0x");
return proxy.getAddress();
};
beforeEach(async () => {
@@ -32,66 +30,61 @@ describe("EnforcedTxGateway.spec", async () => {
const ProxyAdmin = await ethers.getContractFactory("ProxyAdmin", deployer);
const admin = await ProxyAdmin.deploy();
await admin.deployed();
gateway = await ethers.getContractAt(
"EnforcedTxGateway",
await deployProxy("EnforcedTxGateway", admin.address, []),
await deployProxy("EnforcedTxGateway", await admin.getAddress(), []),
deployer
);
queue = await ethers.getContractAt(
"L1MessageQueue",
await deployProxy("L1MessageQueue", admin.address, [deployer.address, deployer.address, gateway.address]),
await deployProxy("L1MessageQueue", await admin.getAddress(), [
deployer.address,
deployer.address,
await gateway.getAddress(),
]),
deployer
);
oracle = await ethers.getContractAt(
"L2GasPriceOracle",
await deployProxy("L2GasPriceOracle", admin.address, []),
await deployProxy("L2GasPriceOracle", await admin.getAddress(), []),
deployer
);
const MockCaller = await ethers.getContractFactory("MockCaller", deployer);
caller = await MockCaller.deploy();
await caller.deployed();
await queue.initialize(
constants.AddressZero,
constants.AddressZero,
constants.AddressZero,
oracle.address,
10000000
);
await gateway.initialize(queue.address, feeVault.address);
await queue.initialize(ZeroAddress, ZeroAddress, ZeroAddress, oracle.getAddress(), 10000000);
await gateway.initialize(queue.getAddress(), feeVault.address);
await oracle.initialize(21000, 51000, 8, 16);
const Whitelist = await ethers.getContractFactory("Whitelist", deployer);
const whitelist = await Whitelist.deploy(deployer.address);
await whitelist.deployed();
await whitelist.updateWhitelistStatus([deployer.address], true);
await oracle.updateWhitelist(whitelist.address);
await oracle.updateWhitelist(whitelist.getAddress());
await oracle.setL2BaseFee(1);
});
context("auth", async () => {
it("should initialize correctly", async () => {
expect(await gateway.owner()).to.eq(deployer.address);
expect(await gateway.messageQueue()).to.eq(queue.address);
expect(await gateway.messageQueue()).to.eq(await queue.getAddress());
expect(await gateway.feeVault()).to.eq(feeVault.address);
expect(await gateway.paused()).to.eq(false);
});
it("should revert, when initialize again", async () => {
await expect(gateway.initialize(constants.AddressZero, constants.AddressZero)).to.revertedWith(
await expect(gateway.initialize(ZeroAddress, ZeroAddress)).to.revertedWith(
"Initializable: contract is already initialized"
);
});
context("#updateFeeVault", async () => {
it("should revert, when non-owner call", async () => {
await expect(gateway.connect(signer).updateFeeVault(constants.AddressZero)).to.revertedWith(
await expect(gateway.connect(signer).updateFeeVault(ZeroAddress)).to.revertedWith(
"Ownable: caller is not the owner"
);
});
@@ -129,13 +122,13 @@ describe("EnforcedTxGateway.spec", async () => {
});
it("should revert, when call is not EOA", async () => {
const tx = await gateway.populateTransaction["sendTransaction(address,uint256,uint256,bytes)"](
const calldata = gateway.interface.encodeFunctionData("sendTransaction(address,uint256,uint256,bytes)", [
signer.address,
0,
0,
"0x"
);
await expect(caller.callTarget(gateway.address, tx.data!)).to.revertedWith(
"0x",
]);
await expect(caller.callTarget(gateway.getAddress(), calldata)).to.revertedWith(
"Only EOA senders are allowed to send enforced transaction"
);
});
@@ -145,12 +138,12 @@ describe("EnforcedTxGateway.spec", async () => {
await expect(
gateway
.connect(signer)
["sendTransaction(address,uint256,uint256,bytes)"](signer.address, 0, 1000000, "0x", { value: fee.sub(1) })
["sendTransaction(address,uint256,uint256,bytes)"](signer.address, 0, 1000000, "0x", { value: fee - 1n })
).to.revertedWith("Insufficient value for fee");
});
it("should revert, when failed to deduct the fee", async () => {
await gateway.updateFeeVault(gateway.address);
await gateway.updateFeeVault(gateway.getAddress());
const fee = await queue.estimateCrossDomainMessageFee(1000000);
await expect(
gateway
@@ -170,7 +163,7 @@ describe("EnforcedTxGateway.spec", async () => {
.to.emit(queue, "QueueTransaction")
.withArgs(signer.address, deployer.address, 0, 0, 1000000, "0x");
const feeVaultBalanceAfter = await ethers.provider.getBalance(feeVault.address);
expect(feeVaultBalanceAfter.sub(feeVaultBalanceBefore)).to.eq(fee);
expect(feeVaultBalanceAfter - feeVaultBalanceBefore).to.eq(fee);
});
it("should succeed, with refund", async () => {
@@ -179,17 +172,15 @@ describe("EnforcedTxGateway.spec", async () => {
const signerBalanceBefore = await ethers.provider.getBalance(signer.address);
const tx = await gateway
.connect(signer)
["sendTransaction(address,uint256,uint256,bytes)"](deployer.address, 0, 1000000, "0x", { value: fee.add(100) });
["sendTransaction(address,uint256,uint256,bytes)"](deployer.address, 0, 1000000, "0x", { value: fee + 100n });
await expect(tx)
.to.emit(queue, "QueueTransaction")
.withArgs(signer.address, deployer.address, 0, 0, 1000000, "0x");
const receipt = await tx.wait();
const feeVaultBalanceAfter = await ethers.provider.getBalance(feeVault.address);
const signerBalanceAfter = await ethers.provider.getBalance(signer.address);
expect(feeVaultBalanceAfter.sub(feeVaultBalanceBefore)).to.eq(fee);
expect(signerBalanceBefore.sub(signerBalanceAfter)).to.eq(
receipt.gasUsed.mul(receipt.effectiveGasPrice).add(fee)
);
expect(feeVaultBalanceAfter - feeVaultBalanceBefore).to.eq(fee);
expect(signerBalanceBefore - signerBalanceAfter).to.eq(receipt!.gasUsed * receipt!.gasPrice + fee);
});
});
@@ -203,19 +194,19 @@ describe("EnforcedTxGateway.spec", async () => {
) => {
const enforcedTx = {
sender: signer.address,
target: target,
value: value,
gasLimit: gasLimit,
data: arrayify(data),
target,
value,
gasLimit,
data: getBytes(data),
nonce: await gateway.nonces(signer.address),
deadline: constants.MaxUint256,
deadline: MaxUint256,
};
const domain = {
name: "EnforcedTxGateway",
version: "1",
chainId: (await ethers.provider.getNetwork()).chainId,
verifyingContract: gateway.address,
verifyingContract: await gateway.getAddress(),
};
const types = {
@@ -251,7 +242,7 @@ describe("EnforcedTxGateway.spec", async () => {
],
};
const signature = await signer._signTypedData(domain, types, enforcedTx);
const signature = await signer.signTypedData(domain, types, enforcedTx);
return signature;
};
@@ -266,15 +257,15 @@ describe("EnforcedTxGateway.spec", async () => {
0,
0,
"0x",
constants.MaxUint256,
MaxUint256,
"0x",
constants.AddressZero
ZeroAddress
)
).to.revertedWith("Pausable: paused");
});
it("should revert, when signature expired", async () => {
const timestamp = (await ethers.provider.getBlock("latest")).timestamp;
const timestamp = (await ethers.provider.getBlock("latest"))!.timestamp;
await expect(
gateway
.connect(deployer)
@@ -286,7 +277,7 @@ describe("EnforcedTxGateway.spec", async () => {
"0x",
timestamp - 1,
"0x",
constants.AddressZero
ZeroAddress
)
).to.revertedWith("signature expired");
});
@@ -302,9 +293,9 @@ describe("EnforcedTxGateway.spec", async () => {
0,
0,
"0x",
constants.MaxUint256,
MaxUint256,
signature,
constants.AddressZero
ZeroAddress
)
).to.revertedWith("Incorrect signature");
});
@@ -321,16 +312,16 @@ describe("EnforcedTxGateway.spec", async () => {
0,
1000000,
"0x",
constants.MaxUint256,
MaxUint256,
signature,
signer.address,
{ value: fee.sub(1) }
{ value: fee - 1n }
)
).to.revertedWith("Insufficient value for fee");
});
it("should revert, when failed to deduct the fee", async () => {
await gateway.updateFeeVault(gateway.address);
await gateway.updateFeeVault(gateway.getAddress());
const signature = await getSignature(signer, signer.address, 0, 1000000, "0x");
const fee = await queue.estimateCrossDomainMessageFee(1000000);
await expect(
@@ -342,7 +333,7 @@ describe("EnforcedTxGateway.spec", async () => {
0,
1000000,
"0x",
constants.MaxUint256,
MaxUint256,
signature,
signer.address,
{ value: fee }
@@ -364,7 +355,7 @@ describe("EnforcedTxGateway.spec", async () => {
0,
1000000,
"0x",
constants.MaxUint256,
MaxUint256,
signature,
signer.address,
{ value: fee }
@@ -374,7 +365,7 @@ describe("EnforcedTxGateway.spec", async () => {
.withArgs(signer.address, deployer.address, 0, 0, 1000000, "0x");
expect(await gateway.nonces(signer.address)).to.eq(1);
const feeVaultBalanceAfter = await ethers.provider.getBalance(feeVault.address);
expect(feeVaultBalanceAfter.sub(feeVaultBalanceBefore)).to.eq(fee);
expect(feeVaultBalanceAfter - feeVaultBalanceBefore).to.eq(fee);
// use the same nonce to sign should fail
await expect(
@@ -386,7 +377,7 @@ describe("EnforcedTxGateway.spec", async () => {
0,
1000000,
"0x",
constants.MaxUint256,
MaxUint256,
signature,
signer.address,
{ value: fee }
@@ -409,10 +400,10 @@ describe("EnforcedTxGateway.spec", async () => {
0,
1000000,
"0x",
constants.MaxUint256,
MaxUint256,
signature,
signer.address,
{ value: fee.add(100) }
{ value: fee + 100n }
)
)
.to.emit(queue, "QueueTransaction")
@@ -420,8 +411,8 @@ describe("EnforcedTxGateway.spec", async () => {
expect(await gateway.nonces(signer.address)).to.eq(1);
const feeVaultBalanceAfter = await ethers.provider.getBalance(feeVault.address);
const signerBalanceAfter = await ethers.provider.getBalance(signer.address);
expect(feeVaultBalanceAfter.sub(feeVaultBalanceBefore)).to.eq(fee);
expect(signerBalanceAfter.sub(signerBalanceBefore)).to.eq(100);
expect(feeVaultBalanceAfter - feeVaultBalanceBefore).to.eq(fee);
expect(signerBalanceAfter - signerBalanceBefore).to.eq(100n);
// use the same nonce to sign should fail
await expect(
@@ -433,10 +424,10 @@ describe("EnforcedTxGateway.spec", async () => {
0,
1000000,
"0x",
constants.MaxUint256,
MaxUint256,
signature,
signer.address,
{ value: fee.add(100) }
{ value: fee + 100n }
)
).to.revertedWith("Incorrect signature");
});
@@ -453,10 +444,10 @@ describe("EnforcedTxGateway.spec", async () => {
0,
1000000,
"0x1234",
constants.MaxUint256,
MaxUint256,
signature,
gateway.address,
{ value: fee.add(100) }
gateway.getAddress(),
{ value: fee + 100n }
)
).to.revertedWith("Failed to refund the fee");
});

View File

@@ -1,9 +1,8 @@
/* eslint-disable node/no-missing-import */
/* eslint-disable node/no-unpublished-import */
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers";
import { expect } from "chai";
import { BigNumber, BigNumberish, ContractTransaction, constants } from "ethers";
import { keccak256 } from "ethers/lib/utils";
import { BigNumberish, ContractTransactionResponse, MaxUint256, keccak256, toQuantity } from "ethers";
import { ethers, network } from "hardhat";
import {
@@ -24,31 +23,27 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L2_MESSAGE_QUEUE = "0x5300000000000000000000000000000000000000";
const SCROLL_CHAIN = "0xa13BAF47339d63B743e7Da8741db5456DAc1E556";
let deployer: SignerWithAddress;
let deployer: HardhatEthersSigner;
let proxyAdmin: ProxyAdmin;
const mockERC20Balance = async (tokenAddress: string, balance: BigNumber, slot: BigNumberish) => {
const mockERC20Balance = async (tokenAddress: string, balance: bigint, slot: BigNumberish) => {
const storageSlot = keccak256(
ethers.utils.defaultAbiCoder.encode(["address", "uint256"], [deployer.address, slot])
ethers.AbiCoder.defaultAbiCoder().encode(["address", "uint256"], [deployer.address, slot])
);
await ethers.provider.send("hardhat_setStorageAt", [
tokenAddress,
storageSlot,
ethers.utils.hexlify(ethers.utils.zeroPad(balance.toHexString(), 32)),
]);
await ethers.provider.send("hardhat_setStorageAt", [tokenAddress, storageSlot, toQuantity(balance)]);
const token = await ethers.getContractAt("MockERC20", tokenAddress, deployer);
expect(await token.balanceOf(deployer.address)).to.eq(balance);
};
const mockETHBalance = async (balance: BigNumber) => {
await network.provider.send("hardhat_setBalance", [deployer.address, balance.toHexString()]);
expect(await deployer.getBalance()).to.eq(balance);
const mockETHBalance = async (balance: bigint) => {
await network.provider.send("hardhat_setBalance", [deployer.address, toQuantity(balance)]);
expect(await ethers.provider.getBalance(deployer.address)).to.eq(balance);
};
const showGasUsage = async (tx: ContractTransaction, desc: string) => {
const showGasUsage = async (tx: ContractTransactionResponse, desc: string) => {
const receipt = await tx.wait();
console.log(`${desc}: GasUsed[${receipt.gasUsed}]`);
console.log(`${desc}: GasUsed[${receipt!.gasUsed}]`);
};
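
As an aside, the slot passed to `mockERC20Balance` follows the standard Solidity mapping storage layout; a standalone sketch of the computation (the slot index per token is an assumption that has to be discovered per contract):

```typescript
import { AbiCoder, keccak256 } from "ethers";

// For a Solidity `mapping(address => uint256)` declared at storage slot `slotIndex`,
// the value for `holder` lives at keccak256(abi.encode(holder, slotIndex)).
function erc20BalanceSlot(holder: string, slotIndex: bigint): string {
  return keccak256(
    AbiCoder.defaultAbiCoder().encode(["address", "uint256"], [holder, slotIndex])
  );
}

// The slot that `hardhat_setStorageAt` overwrites, e.g. slot 3 as used for L1 WETH below.
console.log(erc20BalanceSlot("0x1100000000000000000000000000000000000011", 3n));
```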
context("L1 upgrade", async () => {
@@ -59,7 +54,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
beforeEach(async () => {
// fork network
const provider = new ethers.providers.JsonRpcProvider("https://rpc.ankr.com/eth");
const provider = new ethers.JsonRpcProvider("https://rpc.ankr.com/eth");
if (!forkBlock) {
forkBlock = (await provider.getBlockNumber()) - 10;
}
@@ -81,14 +76,14 @@ describe("GasOptimizationUpgrade.spec", async () => {
// mock eth balance
deployer = await ethers.getSigner("0x1100000000000000000000000000000000000011");
await mockETHBalance(ethers.utils.parseEther("1000"));
await mockETHBalance(ethers.parseEther("1000"));
// mock owner of proxy admin
proxyAdmin = await ethers.getContractAt("ProxyAdmin", "0xEB803eb3F501998126bf37bB823646Ed3D59d072", deployer);
await ethers.provider.send("hardhat_setStorageAt", [
proxyAdmin.address,
await proxyAdmin.getAddress(),
"0x0",
ethers.utils.hexlify(ethers.utils.zeroPad(deployer.address, 32)),
ethers.AbiCoder.defaultAbiCoder().encode(["address"], [deployer.address]),
]);
expect(await proxyAdmin.owner()).to.eq(deployer.address);
@@ -107,9 +102,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
const ScrollChain = await ethers.getContractFactory("ScrollChain", deployer);
await proxyAdmin.upgrade(
L1_MESSENGER,
(
await L1ScrollMessenger.deploy(L2_MESSENGER, SCROLL_CHAIN, L1_MESSAGE_QUEUE)
).address
(await L1ScrollMessenger.deploy(L2_MESSENGER, SCROLL_CHAIN, L1_MESSAGE_QUEUE)).getAddress()
);
await proxyAdmin.upgrade(
L1_MESSAGE_QUEUE,
@@ -119,14 +112,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
SCROLL_CHAIN,
"0x72CAcBcfDe2d1e19122F8A36a4d6676cd39d7A5d"
)
).address
).getAddress()
);
await queue.initializeV2();
await proxyAdmin.upgrade(
SCROLL_CHAIN,
(
await ScrollChain.deploy(534352, L1_MESSAGE_QUEUE, "0xA2Ab526e5C5491F10FC05A55F064BF9F7CEf32a0")
).address
(await ScrollChain.deploy(534352, L1_MESSAGE_QUEUE, "0xA2Ab526e5C5491F10FC05A55F064BF9F7CEf32a0")).getAddress()
);
};
@@ -136,40 +127,40 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L1ETHGateway = await ethers.getContractFactory("L1ETHGateway", deployer);
const impl = await L1ETHGateway.deploy(L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
const gateway = await ethers.getContractAt("L1ETHGateway", L1_GATEWAY, deployer);
const amountIn = ethers.utils.parseEther("1");
const amountIn = ethers.parseEther("1");
const fee = await queue.estimateCrossDomainMessageFee(1e6);
// before upgrade
await showGasUsage(
await gateway["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
await gateway["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn + fee }),
"L1ETHGateway.depositETH before upgrade"
);
await showGasUsage(
await router["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
await router["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn + fee }),
"L1GatewayRouter.depositETH before upgrade"
);
await showGasUsage(
await messenger["sendMessage(address,uint256,bytes,uint256)"](deployer.address, amountIn, "0x", 1e6, {
value: amountIn.add(fee),
value: amountIn + fee,
}),
"L1ScrollMessenger.sendMessage before upgrade"
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
await upgradeL1(L1_GATEWAY, await impl.getAddress());
// after upgrade
await showGasUsage(
await gateway["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
await gateway["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn + fee }),
"L1ETHGateway.depositETH after upgrade"
);
await showGasUsage(
await router["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
await router["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn + fee }),
"L1GatewayRouter.depositETH after upgrade"
);
await showGasUsage(
await messenger["sendMessage(address,uint256,bytes,uint256)"](deployer.address, amountIn, "0x", 1e6, {
value: amountIn.add(fee),
value: amountIn + fee,
}),
"L1ScrollMessenger.sendMessage after upgrade"
);
@@ -183,12 +174,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L1WETHGateway = await ethers.getContractFactory("L1WETHGateway", deployer);
const impl = await L1WETHGateway.deploy(L1_WETH, L2_WETH, L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
const gateway = await ethers.getContractAt("L1WETHGateway", L1_GATEWAY, deployer);
const amountIn = ethers.utils.parseEther("1");
const amountIn = ethers.parseEther("1");
const fee = await queue.estimateCrossDomainMessageFee(1e6);
const token = await ethers.getContractAt("MockERC20", L1_WETH, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 3);
await token.approve(L1_GATEWAY, constants.MaxUint256);
await token.approve(L1_ROUTER, constants.MaxUint256);
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 3);
await token.approve(L1_GATEWAY, MaxUint256);
await token.approve(L1_ROUTER, MaxUint256);
// before upgrade
await showGasUsage(
@@ -201,7 +192,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
await upgradeL1(L1_GATEWAY, await impl.getAddress());
// after upgrade
await showGasUsage(
@@ -227,12 +218,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
"0x66e5312EDeEAef6e80759A0F789e7914Fb401484"
);
const gateway = await ethers.getContractAt("L1StandardERC20Gateway", L1_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 6);
const amountIn = ethers.parseUnits("1", 6);
const fee = await queue.estimateCrossDomainMessageFee(1e6);
const token = await ethers.getContractAt("MockERC20", L1_USDT, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 2);
await token.approve(L1_GATEWAY, constants.MaxUint256);
await token.approve(L1_ROUTER, constants.MaxUint256);
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 2);
await token.approve(L1_GATEWAY, MaxUint256);
await token.approve(L1_ROUTER, MaxUint256);
// before upgrade
await showGasUsage(
@@ -245,7 +236,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
await upgradeL1(L1_GATEWAY, await impl.getAddress());
// after upgrade
await showGasUsage(
@@ -265,12 +256,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L1CustomERC20Gateway = await ethers.getContractFactory("L1CustomERC20Gateway", deployer);
const impl = await L1CustomERC20Gateway.deploy(L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
const gateway = await ethers.getContractAt("L1CustomERC20Gateway", L1_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 18);
const amountIn = ethers.parseUnits("1", 18);
const fee = await queue.estimateCrossDomainMessageFee(1e6);
const token = await ethers.getContractAt("MockERC20", L1_DAI, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 2);
await token.approve(L1_GATEWAY, constants.MaxUint256);
await token.approve(L1_ROUTER, constants.MaxUint256);
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 2);
await token.approve(L1_GATEWAY, MaxUint256);
await token.approve(L1_ROUTER, MaxUint256);
// before upgrade
await showGasUsage(
@@ -283,7 +274,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
await upgradeL1(L1_GATEWAY, await impl.getAddress());
// after upgrade
await showGasUsage(
@@ -304,12 +295,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L1USDCGateway = await ethers.getContractFactory("L1USDCGateway", deployer);
const impl = await L1USDCGateway.deploy(L1_USDC, L2_USDC, L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
const gateway = await ethers.getContractAt("L1USDCGateway", L1_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 6);
const amountIn = ethers.parseUnits("1", 6);
const fee = await queue.estimateCrossDomainMessageFee(1e6);
const token = await ethers.getContractAt("MockERC20", L1_USDC, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 9);
await token.approve(L1_GATEWAY, constants.MaxUint256);
await token.approve(L1_ROUTER, constants.MaxUint256);
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 9);
await token.approve(L1_GATEWAY, MaxUint256);
await token.approve(L1_ROUTER, MaxUint256);
// before upgrade
await showGasUsage(
@@ -322,7 +313,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
await upgradeL1(L1_GATEWAY, await impl.getAddress());
// after upgrade
await showGasUsage(
@@ -343,12 +334,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L1LidoGateway = await ethers.getContractFactory("L1LidoGateway", deployer);
const impl = await L1LidoGateway.deploy(L1_WSTETH, L2_WSTETH, L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
const gateway = await ethers.getContractAt("L1LidoGateway", L1_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 6);
const amountIn = ethers.parseUnits("1", 6);
const fee = await queue.estimateCrossDomainMessageFee(1e6);
const token = await ethers.getContractAt("MockERC20", L1_WSTETH, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 0);
await token.approve(L1_GATEWAY, constants.MaxUint256);
await token.approve(L1_ROUTER, constants.MaxUint256);
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 0);
await token.approve(L1_GATEWAY, MaxUint256);
await token.approve(L1_ROUTER, MaxUint256);
// before upgrade
await showGasUsage(
@@ -361,7 +352,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL1(L1_GATEWAY, impl.address);
await upgradeL1(L1_GATEWAY, await impl.getAddress());
await gateway.initializeV2(deployer.address, deployer.address, deployer.address, deployer.address);
// after upgrade
@@ -383,7 +374,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
beforeEach(async () => {
// fork network
const provider = new ethers.providers.JsonRpcProvider("https://rpc.scroll.io");
const provider = new ethers.JsonRpcProvider("https://rpc.scroll.io");
if (!forkBlock) {
forkBlock = (await provider.getBlockNumber()) - 31;
}
@@ -405,14 +396,14 @@ describe("GasOptimizationUpgrade.spec", async () => {
// mock eth balance
deployer = await ethers.getSigner("0x1100000000000000000000000000000000000011");
await mockETHBalance(ethers.utils.parseEther("1000"));
await mockETHBalance(ethers.parseEther("1000"));
// mock owner of proxy admin
proxyAdmin = await ethers.getContractAt("ProxyAdmin", "0xA76acF000C890b0DD7AEEf57627d9899F955d026", deployer);
await ethers.provider.send("hardhat_setStorageAt", [
proxyAdmin.address,
await proxyAdmin.getAddress(),
"0x0",
ethers.utils.hexlify(ethers.utils.zeroPad(deployer.address, 32)),
ethers.AbiCoder.defaultAbiCoder().encode(["address"], [deployer.address]),
]);
expect(await proxyAdmin.owner()).to.eq(deployer.address);
@@ -423,7 +414,10 @@ describe("GasOptimizationUpgrade.spec", async () => {
const upgradeL2 = async (proxy: string, impl: string) => {
await proxyAdmin.upgrade(proxy, impl);
const L2ScrollMessenger = await ethers.getContractFactory("L2ScrollMessenger", deployer);
await proxyAdmin.upgrade(L2_MESSENGER, (await L2ScrollMessenger.deploy(L1_MESSENGER, L2_MESSAGE_QUEUE)).address);
await proxyAdmin.upgrade(
L2_MESSENGER,
(await L2ScrollMessenger.deploy(L1_MESSENGER, L2_MESSAGE_QUEUE)).getAddress()
);
};
it.skip("should succeed on L2ETHGateway", async () => {
@@ -432,7 +426,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L2ETHGateway = await ethers.getContractFactory("L2ETHGateway", deployer);
const impl = await L2ETHGateway.deploy(L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
const gateway = await ethers.getContractAt("L2ETHGateway", L2_GATEWAY, deployer);
const amountIn = ethers.utils.parseEther("1");
const amountIn = ethers.parseEther("1");
// before upgrade
await showGasUsage(
@@ -451,7 +445,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL2(L2_GATEWAY, impl.address);
await upgradeL2(L2_GATEWAY, await impl.getAddress());
// after upgrade
await showGasUsage(
@@ -478,11 +472,11 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L2WETHGateway = await ethers.getContractFactory("L2WETHGateway", deployer);
const impl = await L2WETHGateway.deploy(L2_WETH, L1_WETH, L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
const gateway = await ethers.getContractAt("L2WETHGateway", L2_GATEWAY, deployer);
const amountIn = ethers.utils.parseEther("1");
const amountIn = ethers.parseEther("1");
const token = await ethers.getContractAt("MockERC20", L2_WETH, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 0);
await token.approve(L2_GATEWAY, constants.MaxUint256);
await token.approve(L2_ROUTER, constants.MaxUint256);
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 0);
await token.approve(L2_GATEWAY, MaxUint256);
await token.approve(L2_ROUTER, MaxUint256);
// before upgrade
await showGasUsage(
@@ -495,7 +489,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL2(L2_GATEWAY, impl.address);
await upgradeL2(L2_GATEWAY, await impl.getAddress());
// after upgrade
await showGasUsage(
@@ -520,11 +514,11 @@ describe("GasOptimizationUpgrade.spec", async () => {
"0x66e5312EDeEAef6e80759A0F789e7914Fb401484"
);
const gateway = await ethers.getContractAt("L2StandardERC20Gateway", L2_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 6);
const amountIn = ethers.parseUnits("1", 6);
const token = await ethers.getContractAt("MockERC20", L2_USDT, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 51);
await token.approve(L2_GATEWAY, constants.MaxUint256);
await token.approve(L2_ROUTER, constants.MaxUint256);
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 51);
await token.approve(L2_GATEWAY, MaxUint256);
await token.approve(L2_ROUTER, MaxUint256);
// before upgrade
await showGasUsage(
@@ -537,7 +531,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL2(L2_GATEWAY, impl.address);
await upgradeL2(L2_GATEWAY, await impl.getAddress());
// after upgrade
await showGasUsage(
@@ -557,11 +551,11 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L2CustomERC20Gateway = await ethers.getContractFactory("L2CustomERC20Gateway", deployer);
const impl = await L2CustomERC20Gateway.deploy(L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
const gateway = await ethers.getContractAt("L2CustomERC20Gateway", L2_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 18);
const amountIn = ethers.parseUnits("1", 18);
const token = await ethers.getContractAt("MockERC20", L2_DAI, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 51);
await token.approve(L1_GATEWAY, constants.MaxUint256);
await token.approve(L1_ROUTER, constants.MaxUint256);
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 51);
await token.approve(L1_GATEWAY, MaxUint256);
await token.approve(L1_ROUTER, MaxUint256);
// before upgrade
await showGasUsage(
@@ -574,7 +568,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL2(L2_GATEWAY, impl.address);
await upgradeL2(L2_GATEWAY, await impl.getAddress());
// after upgrade
await showGasUsage(
@@ -595,11 +589,11 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L2USDCGateway = await ethers.getContractFactory("L2USDCGateway", deployer);
const impl = await L2USDCGateway.deploy(L1_USDC, L2_USDC, L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
const gateway = await ethers.getContractAt("L2USDCGateway", L2_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 6);
const amountIn = ethers.parseUnits("1", 6);
const token = await ethers.getContractAt("MockERC20", L2_USDC, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 9);
await token.approve(L2_GATEWAY, constants.MaxUint256);
await token.approve(L2_ROUTER, constants.MaxUint256);
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 9);
await token.approve(L2_GATEWAY, MaxUint256);
await token.approve(L2_ROUTER, MaxUint256);
// before upgrade
await showGasUsage(
@@ -612,7 +606,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL2(L2_GATEWAY, impl.address);
await upgradeL2(L2_GATEWAY, await impl.getAddress());
// after upgrade
await showGasUsage(
@@ -633,11 +627,11 @@ describe("GasOptimizationUpgrade.spec", async () => {
const L2LidoGateway = await ethers.getContractFactory("L2LidoGateway", deployer);
const impl = await L2LidoGateway.deploy(L1_WSTETH, L2_WSTETH, L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
const gateway = await ethers.getContractAt("L2LidoGateway", L2_GATEWAY, deployer);
const amountIn = ethers.utils.parseUnits("1", 6);
const amountIn = ethers.parseUnits("1", 6);
const token = await ethers.getContractAt("MockERC20", L2_WSTETH, deployer);
await mockERC20Balance(token.address, amountIn.mul(10), 51);
await token.approve(L2_GATEWAY, constants.MaxUint256);
await token.approve(L2_ROUTER, constants.MaxUint256);
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 51);
await token.approve(L2_GATEWAY, MaxUint256);
await token.approve(L2_ROUTER, MaxUint256);
// before upgrade
await showGasUsage(
@@ -650,7 +644,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
);
// do upgrade
await upgradeL2(L2_GATEWAY, impl.address);
await upgradeL2(L2_GATEWAY, await impl.getAddress());
await gateway.initializeV2(deployer.address, deployer.address, deployer.address, deployer.address);
// after upgrade

View File

@@ -1,15 +1,15 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { ethers } from "hardhat";
import { GasSwap, ERC2771Forwarder, MockERC20, MockGasSwapTarget } from "../typechain";
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers";
import { expect } from "chai";
import { BigNumber, constants } from "ethers";
import { splitSignature } from "ethers/lib/utils";
import { MaxUint256, Signature, ZeroAddress, ZeroHash, toBigInt } from "ethers";
import { ethers } from "hardhat";
import { GasSwap, ERC2771Forwarder, MockERC20, MockGasSwapTarget } from "../typechain";
describe("GasSwap.spec", async () => {
let deployer: SignerWithAddress;
let signer: SignerWithAddress;
let deployer: HardhatEthersSigner;
let signer: HardhatEthersSigner;
let forwarder: ERC2771Forwarder;
let swap: GasSwap;
@@ -21,19 +21,15 @@ describe("GasSwap.spec", async () => {
const ERC2771Forwarder = await ethers.getContractFactory("ERC2771Forwarder", deployer);
forwarder = await ERC2771Forwarder.deploy("ERC2771Forwarder");
await forwarder.deployed();
const GasSwap = await ethers.getContractFactory("GasSwap", deployer);
swap = await GasSwap.deploy(forwarder.address);
await swap.deployed();
swap = await GasSwap.deploy(forwarder.getAddress());
const MockGasSwapTarget = await ethers.getContractFactory("MockGasSwapTarget", deployer);
target = await MockGasSwapTarget.deploy();
await target.deployed();
const MockERC20 = await ethers.getContractFactory("MockERC20", deployer);
token = await MockERC20.deploy("x", "y", 18);
await token.deployed();
});
context("auth", async () => {
@@ -43,11 +39,11 @@ describe("GasSwap.spec", async () => {
context("#updateFeeRatio", async () => {
it("should revert, when non-owner call", async () => {
await expect(swap.connect(signer).updateFeeRatio(1)).to.revertedWith("caller is not the owner");
await expect(swap.connect(signer).updateFeeRatio(1)).to.revertedWith("Ownable: caller is not the owner");
});
it("should succeed", async () => {
expect(await swap.feeRatio()).to.eq(constants.AddressZero);
expect(await swap.feeRatio()).to.eq(ZeroAddress);
await expect(swap.updateFeeRatio(100)).to.emit(swap, "UpdateFeeRatio").withArgs(100);
expect(await swap.feeRatio()).to.eq(100);
});
@@ -55,66 +51,62 @@ describe("GasSwap.spec", async () => {
context("#updateApprovedTarget", async () => {
it("should revert, when non-owner call", async () => {
await expect(swap.connect(signer).updateApprovedTarget(target.address, false)).to.revertedWith(
"caller is not the owner"
await expect(swap.connect(signer).updateApprovedTarget(target.getAddress(), false)).to.revertedWith(
"Ownable: caller is not the owner"
);
});
it("should succeed", async () => {
expect(await swap.approvedTargets(target.address)).to.eq(false);
await expect(swap.updateApprovedTarget(target.address, true))
expect(await swap.approvedTargets(target.getAddress())).to.eq(false);
await expect(swap.updateApprovedTarget(target.getAddress(), true))
.to.emit(swap, "UpdateApprovedTarget")
.withArgs(target.address, true);
expect(await swap.approvedTargets(target.address)).to.eq(true);
await expect(swap.updateApprovedTarget(target.address, false))
.withArgs(await target.getAddress(), true);
expect(await swap.approvedTargets(target.getAddress())).to.eq(true);
await expect(swap.updateApprovedTarget(target.getAddress(), false))
.to.emit(swap, "UpdateApprovedTarget")
.withArgs(target.address, false);
expect(await swap.approvedTargets(target.address)).to.eq(false);
.withArgs(await target.getAddress(), false);
expect(await swap.approvedTargets(target.getAddress())).to.eq(false);
});
});
context("#withdraw", async () => {
it("should revert, when non-owner call", async () => {
await expect(swap.connect(signer).withdraw(constants.AddressZero, 0)).to.revertedWith(
"caller is not the owner"
);
await expect(swap.connect(signer).withdraw(ZeroAddress, 0)).to.revertedWith("Ownable: caller is not the owner");
});
it("should succeed, when withdraw ETH", async () => {
await deployer.sendTransaction({ to: swap.address, value: ethers.utils.parseEther("1") });
const balanceBefore = await deployer.getBalance();
const tx = await swap.withdraw(constants.AddressZero, ethers.utils.parseEther("1"));
await deployer.sendTransaction({ to: swap.getAddress(), value: ethers.parseEther("1") });
const balanceBefore = await ethers.provider.getBalance(deployer.address);
const tx = await swap.withdraw(ZeroAddress, ethers.parseEther("1"));
const receipt = await tx.wait();
const balanceAfter = await deployer.getBalance();
expect(balanceAfter.sub(balanceBefore)).to.eq(
ethers.utils.parseEther("1").sub(receipt.gasUsed.mul(receipt.effectiveGasPrice))
);
const balanceAfter = await ethers.provider.getBalance(deployer.address);
expect(balanceAfter - balanceBefore).to.eq(ethers.parseEther("1") - receipt!.gasUsed * receipt!.gasPrice);
});
it("should succeed, when withdraw token", async () => {
await token.mint(swap.address, ethers.utils.parseEther("1"));
await token.mint(swap.getAddress(), ethers.parseEther("1"));
const balanceBefore = await token.balanceOf(deployer.address);
await swap.withdraw(token.address, ethers.utils.parseEther("1"));
await swap.withdraw(token.getAddress(), ethers.parseEther("1"));
const balanceAfter = await token.balanceOf(deployer.address);
expect(balanceAfter.sub(balanceBefore)).to.eq(ethers.utils.parseEther("1"));
expect(balanceAfter - balanceBefore).to.eq(ethers.parseEther("1"));
});
});
});
const permit = async (amount: BigNumber) => {
const permit = async (amount: bigint) => {
const value = {
owner: signer.address,
spender: swap.address,
spender: await swap.getAddress(),
value: amount,
nonce: await token.nonces(signer.address),
deadline: constants.MaxUint256,
deadline: MaxUint256,
};
const domain = {
name: await token.name(),
version: "1",
chainId: (await ethers.provider.getNetwork()).chainId,
verifyingContract: token.address,
verifyingContract: await token.getAddress(),
};
const types = {
@@ -142,7 +134,7 @@ describe("GasSwap.spec", async () => {
],
};
const signature = splitSignature(await signer._signTypedData(domain, types, value));
const signature = Signature.from(await signer.signTypedData(domain, types, value));
return signature;
};
@@ -151,15 +143,15 @@ describe("GasSwap.spec", async () => {
await expect(
swap.swap(
{
token: token.address,
token: token.getAddress(),
value: 0,
deadline: 0,
r: constants.HashZero,
s: constants.HashZero,
r: ZeroHash,
s: ZeroHash,
v: 0,
},
{
target: target.address,
target: target.getAddress(),
data: "0x",
minOutput: 0,
}
@@ -168,121 +160,119 @@ describe("GasSwap.spec", async () => {
});
it("should revert, when insufficient output amount", async () => {
const amountIn = ethers.utils.parseEther("1");
const amountOut = ethers.utils.parseEther("2");
const amountIn = ethers.parseEther("1");
const amountOut = ethers.parseEther("2");
await token.mint(signer.address, amountIn);
await deployer.sendTransaction({ to: target.address, value: amountOut });
await deployer.sendTransaction({ to: target.getAddress(), value: amountOut });
const signature = await permit(amountIn);
await target.setToken(token.address);
await target.setToken(token.getAddress());
await target.setAmountIn(amountIn);
await swap.updateApprovedTarget(target.address, true);
await swap.updateApprovedTarget(target.getAddress(), true);
await expect(
swap.connect(signer).swap(
{
token: token.address,
token: await token.getAddress(),
value: amountIn,
deadline: constants.MaxUint256,
deadline: MaxUint256,
r: signature.r,
s: signature.s,
v: signature.v,
},
{
target: target.address,
target: target.getAddress(),
data: "0x8119c065",
minOutput: amountOut.add(1),
minOutput: amountOut + 1n,
}
)
).to.revertedWith("insufficient output amount");
});
for (const refundRatio of ["0", "1", "5"]) {
for (const refundRatio of [0n, 1n, 5n]) {
for (const feeRatio of ["0", "5", "50"]) {
it(`should succeed, when swap by signer directly, with feeRatio[${feeRatio}%] refundRatio[${refundRatio}%]`, async () => {
const amountIn = ethers.utils.parseEther("1");
const amountOut = ethers.utils.parseEther("2");
const amountIn = ethers.parseEther("1");
const amountOut = ethers.parseEther("2");
await token.mint(signer.address, amountIn);
await deployer.sendTransaction({ to: target.address, value: amountOut });
await deployer.sendTransaction({ to: target.getAddress(), value: amountOut });
const signature = await permit(amountIn);
await target.setToken(token.address);
await target.setToken(token.getAddress());
await target.setAmountIn(amountIn);
await target.setRefund(amountIn.mul(refundRatio).div(100));
await target.setRefund((amountIn * refundRatio) / 100n);
await swap.updateApprovedTarget(target.address, true);
await swap.updateFeeRatio(ethers.utils.parseEther(feeRatio).div(100));
const fee = amountOut.mul(feeRatio).div(100);
await swap.updateApprovedTarget(target.getAddress(), true);
await swap.updateFeeRatio(ethers.parseEther(feeRatio) / 100n);
const fee = (amountOut * toBigInt(feeRatio)) / 100n;
const balanceBefore = await signer.getBalance();
const balanceBefore = await ethers.provider.getBalance(signer.address);
const tx = await swap.connect(signer).swap(
{
token: token.address,
token: await token.getAddress(),
value: amountIn,
deadline: constants.MaxUint256,
deadline: MaxUint256,
r: signature.r,
s: signature.s,
v: signature.v,
},
{
target: target.address,
target: target.getAddress(),
data: "0x8119c065",
minOutput: amountOut.sub(fee),
minOutput: amountOut - fee,
}
);
const receipt = await tx.wait();
const balanceAfter = await signer.getBalance();
expect(balanceAfter.sub(balanceBefore)).to.eq(
amountOut.sub(fee).sub(receipt.gasUsed.mul(receipt.effectiveGasPrice))
);
expect(await token.balanceOf(signer.address)).to.eq(amountIn.mul(refundRatio).div(100));
const balanceAfter = await ethers.provider.getBalance(signer.address);
expect(balanceAfter - balanceBefore).to.eq(amountOut - fee - receipt!.gasUsed * receipt!.gasPrice);
expect(await token.balanceOf(signer.address)).to.eq((amountIn * refundRatio) / 100n);
});
it(`should succeed, when swap by signer with forwarder, with feeRatio[${feeRatio}%] refundRatio[${refundRatio}%]`, async () => {
const amountIn = ethers.utils.parseEther("1");
const amountOut = ethers.utils.parseEther("2");
const amountIn = ethers.parseEther("1");
const amountOut = ethers.parseEther("2");
await token.mint(signer.address, amountIn);
await deployer.sendTransaction({ to: target.address, value: amountOut });
await deployer.sendTransaction({ to: await target.getAddress(), value: amountOut });
const permitSignature = await permit(amountIn);
await target.setToken(token.address);
await target.setToken(token.getAddress());
await target.setAmountIn(amountIn);
await target.setRefund(amountIn.mul(refundRatio).div(100));
await target.setRefund((amountIn * refundRatio) / 100n);
await swap.updateApprovedTarget(target.address, true);
await swap.updateFeeRatio(ethers.utils.parseEther(feeRatio).div(100));
const fee = amountOut.mul(feeRatio).div(100);
await swap.updateApprovedTarget(target.getAddress(), true);
await swap.updateFeeRatio(ethers.parseEther(feeRatio) / 100n);
const fee = (amountOut * toBigInt(feeRatio)) / 100n;
const reqWithoutSignature = {
from: signer.address,
to: swap.address,
value: constants.Zero,
to: await swap.getAddress(),
value: 0n,
gas: 1000000,
nonce: await forwarder.nonces(signer.address),
deadline: 2000000000,
data: swap.interface.encodeFunctionData("swap", [
{
token: token.address,
token: await token.getAddress(),
value: amountIn,
deadline: constants.MaxUint256,
deadline: MaxUint256,
r: permitSignature.r,
s: permitSignature.s,
v: permitSignature.v,
},
{
target: target.address,
target: await target.getAddress(),
data: "0x8119c065",
minOutput: amountOut.sub(fee),
minOutput: amountOut - fee,
},
]),
};
const signature = await signer._signTypedData(
const signature = await signer.signTypedData(
{
name: "ERC2771Forwarder",
version: "1",
chainId: (await ethers.provider.getNetwork()).chainId,
verifyingContract: forwarder.address,
verifyingContract: await forwarder.getAddress(),
},
{
ForwardRequest: [
@@ -319,7 +309,7 @@ describe("GasSwap.spec", async () => {
reqWithoutSignature
);
const balanceBefore = await signer.getBalance();
const balanceBefore = await ethers.provider.getBalance(signer.address);
await forwarder.execute({
from: reqWithoutSignature.from,
to: reqWithoutSignature.to,
@@ -329,9 +319,9 @@ describe("GasSwap.spec", async () => {
data: reqWithoutSignature.data,
signature,
});
const balanceAfter = await signer.getBalance();
expect(balanceAfter.sub(balanceBefore)).to.eq(amountOut.sub(fee));
expect(await token.balanceOf(signer.address)).to.eq(amountIn.mul(refundRatio).div(100));
const balanceAfter = await ethers.provider.getBalance(signer.address);
expect(balanceAfter - balanceBefore).to.eq(amountOut - fee);
expect(await token.balanceOf(signer.address)).to.eq((amountIn * refundRatio) / 100n);
});
}
}
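
The GasSwap test updates above follow the recurring ethers v5 to v6 substitutions used throughout this diff; a minimal sketch of those idioms (all names below are illustrative):

import { MaxUint256, ZeroAddress, ZeroHash, parseEther } from "ethers";
import { ethers } from "hardhat";

async function v6Idioms() {
  const [deployer] = await ethers.getSigners();
  // v5: constants.AddressZero / constants.HashZero / constants.MaxUint256 -> v6 top-level exports
  const zero = { address: ZeroAddress, hash: ZeroHash, max: MaxUint256 };
  // v5: BigNumber arithmetic (amount.mul(5).div(100)) -> v6: native bigint
  const amount = parseEther("1"); // bigint in v6
  const fee = (amount * 5n) / 100n;
  // v5: contract.address -> v6: await contract.getAddress()
  // v5: signer.getBalance() -> v6: ethers.provider.getBalance(signer.address)
  const balance = await ethers.provider.getBalance(deployer.address);
  // v5: receipt.effectiveGasPrice -> v6: receipt.gasPrice
  return { zero, fee, balance };
}
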


@@ -1,9 +1,9 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { expect } from "chai";
import { BigNumber, BigNumberish, constants } from "ethers";
import { concat, RLP } from "ethers/lib/utils";
import { BigNumberish, ZeroHash, concat, encodeRlp, toBeHex, toBigInt } from "ethers";
import { ethers } from "hardhat";
import { L1BlockContainer } from "../typechain";
interface IImportTestConfig {
@@ -90,7 +90,7 @@ const testcases: Array<IImportTestConfig> = [
];
function encodeHeader(test: IImportTestConfig): string {
return RLP.encode([
return encodeRlp([
test.parentHash,
test.uncleHash,
test.coinbase,
@@ -98,15 +98,15 @@ function encodeHeader(test: IImportTestConfig): string {
test.transactionsRoot,
test.receiptsRoot,
test.logsBloom,
BigNumber.from(test.difficulty).isZero() ? "0x" : BigNumber.from(test.difficulty).toHexString(),
BigNumber.from(test.blockHeight).toHexString(),
BigNumber.from(test.gasLimit).toHexString(),
BigNumber.from(test.gasUsed).toHexString(),
BigNumber.from(test.blockTimestamp).toHexString(),
toBigInt(test.difficulty) === 0n ? "0x" : toBeHex(test.difficulty),
toBeHex(test.blockHeight),
toBeHex(test.gasLimit),
toBeHex(test.gasUsed),
toBeHex(test.blockTimestamp),
test.extraData,
test.mixHash,
test.blockNonce,
BigNumber.from(test.baseFee).toHexString(),
toBeHex(test.baseFee),
]);
}
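
The header encoding above replaces RLP.encode and BigNumber with the v6 encodeRlp and toBeHex helpers; a small sketch of the quantity rule they rely on (zero encodes as "0x", other values as minimal big-endian hex), using an illustrative rlpQuantity helper:

import { encodeRlp, toBeHex, toBigInt } from "ethers";

// RLP "quantity" fields are minimally encoded: 0 -> "0x", otherwise toBeHex(value).
function rlpQuantity(value: bigint | number | string): string {
  return toBigInt(value) === 0n ? "0x" : toBeHex(value);
}

// Illustrative list only, not the real block header layout.
const encoded = encodeRlp([rlpQuantity(0), rlpQuantity(22334455), "0x1234"]);
console.log(encoded);
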
@@ -124,7 +124,7 @@ describe("L1BlockContainer", async () => {
const whitelist = await Whitelist.deploy(deployer.address);
await whitelist.updateWhitelistStatus([deployer.address], true);
await container.updateWhitelist(whitelist.address);
await container.updateWhitelist(whitelist.getAddress());
});
it("should revert, when sender not allowed", async () => {
@@ -137,7 +137,7 @@ describe("L1BlockContainer", async () => {
test.stateRoot
);
await expect(container.connect(signer).importBlockHeader(constants.HashZero, [], false)).to.revertedWith(
await expect(container.connect(signer).importBlockHeader(ZeroHash, "0x", false)).to.revertedWith(
"Not whitelisted sender"
);
});
@@ -172,7 +172,7 @@ describe("L1BlockContainer", async () => {
it("should revert, when parent not imported", async () => {
await container.initialize(
constants.HashZero,
ZeroHash,
test.blockHeight - 1,
test.blockTimestamp - 1,
test.baseFee,


@@ -1,18 +1,29 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers";
import { expect } from "chai";
import { BigNumber, constants } from "ethers";
import { concat, getAddress, hexlify, keccak256, randomBytes, RLP, stripZeros } from "ethers/lib/utils";
import { ethers } from "hardhat";
import { L1MessageQueue, L2GasPriceOracle } from "../typechain";
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
import {
MaxUint256,
ZeroAddress,
concat,
encodeRlp,
getAddress,
hexlify,
keccak256,
randomBytes,
toBeHex,
toBigInt,
} from "ethers";
describe("L1MessageQueue", async () => {
let deployer: SignerWithAddress;
let scrollChain: SignerWithAddress;
let messenger: SignerWithAddress;
let gateway: SignerWithAddress;
let signer: SignerWithAddress;
let deployer: HardhatEthersSigner;
let scrollChain: HardhatEthersSigner;
let messenger: HardhatEthersSigner;
let gateway: HardhatEthersSigner;
let signer: HardhatEthersSigner;
let oracle: L2GasPriceOracle;
let queue: L1MessageQueue;
@@ -21,10 +32,8 @@ describe("L1MessageQueue", async () => {
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
const Factory = await ethers.getContractFactory(name, deployer);
const impl = args.length > 0 ? await Factory.deploy(...args) : await Factory.deploy();
await impl.deployed();
const proxy = await TransparentUpgradeableProxy.deploy(impl.address, admin, "0x");
await proxy.deployed();
return proxy.address;
const proxy = await TransparentUpgradeableProxy.deploy(impl.getAddress(), admin, "0x");
return proxy.getAddress();
};
beforeEach(async () => {
@@ -32,22 +41,25 @@ describe("L1MessageQueue", async () => {
const ProxyAdmin = await ethers.getContractFactory("ProxyAdmin", deployer);
const admin = await ProxyAdmin.deploy();
await admin.deployed();
queue = await ethers.getContractAt(
"L1MessageQueue",
await deployProxy("L1MessageQueue", admin.address, [messenger.address, scrollChain.address, gateway.address]),
await deployProxy("L1MessageQueue", await admin.getAddress(), [
messenger.address,
scrollChain.address,
gateway.address,
]),
deployer
);
oracle = await ethers.getContractAt(
"L2GasPriceOracle",
await deployProxy("L2GasPriceOracle", admin.address, []),
await deployProxy("L2GasPriceOracle", await admin.getAddress(), []),
deployer
);
await oracle.initialize(21000, 50000, 8, 16);
await queue.initialize(messenger.address, scrollChain.address, constants.AddressZero, oracle.address, 10000000);
await queue.initialize(messenger.address, scrollChain.address, ZeroAddress, oracle.getAddress(), 10000000);
});
context("auth", async () => {
@@ -56,28 +68,28 @@ describe("L1MessageQueue", async () => {
expect(await queue.messenger()).to.eq(messenger.address);
expect(await queue.scrollChain()).to.eq(scrollChain.address);
expect(await queue.enforcedTxGateway()).to.eq(gateway.address);
expect(await queue.gasOracle()).to.eq(oracle.address);
expect(await queue.gasOracle()).to.eq(await oracle.getAddress());
expect(await queue.maxGasLimit()).to.eq(10000000);
});
it("should revert, when initialize again", async () => {
await expect(
queue.initialize(constants.AddressZero, constants.AddressZero, constants.AddressZero, constants.AddressZero, 0)
).to.revertedWith("Initializable: contract is already initialized");
await expect(queue.initialize(ZeroAddress, ZeroAddress, ZeroAddress, ZeroAddress, 0)).to.revertedWith(
"Initializable: contract is already initialized"
);
});
context("#updateGasOracle", async () => {
it("should revert, when non-owner call", async () => {
await expect(queue.connect(signer).updateGasOracle(constants.AddressZero)).to.revertedWith(
await expect(queue.connect(signer).updateGasOracle(ZeroAddress)).to.revertedWith(
"Ownable: caller is not the owner"
);
});
it("should succeed", async () => {
expect(await queue.gasOracle()).to.eq(oracle.address);
expect(await queue.gasOracle()).to.eq(await oracle.getAddress());
await expect(queue.updateGasOracle(deployer.address))
.to.emit(queue, "UpdateGasOracle")
.withArgs(oracle.address, deployer.address);
.withArgs(await oracle.getAddress(), deployer.address);
expect(await queue.gasOracle()).to.eq(deployer.address);
});
});
@@ -101,30 +113,9 @@ describe("L1MessageQueue", async () => {
const target = "0xcb18150e4efefb6786130e289a5f61a82a5b86d7";
const transactionType = "0x7E";
for (const nonce of [
BigNumber.from(0),
BigNumber.from(1),
BigNumber.from(127),
BigNumber.from(128),
BigNumber.from(22334455),
constants.MaxUint256,
]) {
for (const value of [
BigNumber.from(0),
BigNumber.from(1),
BigNumber.from(127),
BigNumber.from(128),
BigNumber.from(22334455),
constants.MaxUint256,
]) {
for (const gasLimit of [
BigNumber.from(0),
BigNumber.from(1),
BigNumber.from(127),
BigNumber.from(128),
BigNumber.from(22334455),
constants.MaxUint256,
]) {
for (const nonce of [0n, 1n, 127n, 128n, 22334455n, MaxUint256]) {
for (const value of [0n, 1n, 127n, 128n, 22334455n, MaxUint256]) {
for (const gasLimit of [0n, 1n, 127n, 128n, 22334455n, MaxUint256]) {
for (const dataLen of [0, 1, 2, 3, 4, 55, 56, 100]) {
const tests = [randomBytes(dataLen)];
if (dataLen === 1) {
@@ -133,11 +124,11 @@ describe("L1MessageQueue", async () => {
}
}
for (const data of tests) {
const transactionPayload = RLP.encode([
stripZeros(nonce.toHexString()),
stripZeros(gasLimit.toHexString()),
const transactionPayload = encodeRlp([
nonce === 0n ? "0x" : toBeHex(nonce),
gasLimit === 0n ? "0x" : toBeHex(gasLimit),
target,
stripZeros(value.toHexString()),
value === 0n ? "0x" : toBeHex(value),
data,
sender,
]);
@@ -159,30 +150,27 @@ describe("L1MessageQueue", async () => {
context("#appendCrossDomainMessage", async () => {
it("should revert, when non-messenger call", async () => {
await expect(queue.connect(signer).appendCrossDomainMessage(constants.AddressZero, 0, "0x")).to.revertedWith(
await expect(queue.connect(signer).appendCrossDomainMessage(ZeroAddress, 0, "0x")).to.revertedWith(
"Only callable by the L1ScrollMessenger"
);
});
it("should revert, when exceed maxGasLimit", async () => {
await expect(
queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 10000001, "0x")
).to.revertedWith("Gas limit must not exceed maxGasLimit");
await expect(queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 10000001, "0x")).to.revertedWith(
"Gas limit must not exceed maxGasLimit"
);
});
it("should revert, when below intrinsic gas", async () => {
await expect(queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 0, "0x")).to.revertedWith(
await expect(queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 0, "0x")).to.revertedWith(
"Insufficient gas limit, must be above intrinsic gas"
);
});
it("should succeed", async () => {
expect(await queue.nextCrossDomainMessageIndex()).to.eq(constants.Zero);
expect(await queue.nextCrossDomainMessageIndex()).to.eq(0n);
const sender = getAddress(
BigNumber.from(messenger.address)
.add("0x1111000000000000000000000000000000001111")
.mod(BigNumber.from(2).pow(160))
.toHexString()
toBeHex((toBigInt(messenger.address) + toBigInt("0x1111000000000000000000000000000000001111")) % 2n ** 160n)
.slice(2)
.padStart(40, "0")
);
@@ -190,7 +178,7 @@ describe("L1MessageQueue", async () => {
await expect(queue.connect(messenger).appendCrossDomainMessage(signer.address, 100000, "0x01"))
.to.emit(queue, "QueueTransaction")
.withArgs(sender, signer.address, 0, 0, 100000, "0x01");
expect(await queue.nextCrossDomainMessageIndex()).to.eq(constants.One);
expect(await queue.nextCrossDomainMessageIndex()).to.eq(1n);
expect(await queue.getCrossDomainMessage(0)).to.eq(hash);
});
});
@@ -198,30 +186,30 @@ describe("L1MessageQueue", async () => {
context("#appendEnforcedTransaction", async () => {
it("should revert, when non-gateway call", async () => {
await expect(
queue.connect(signer).appendEnforcedTransaction(signer.address, constants.AddressZero, 0, 0, "0x")
queue.connect(signer).appendEnforcedTransaction(signer.address, ZeroAddress, 0, 0, "0x")
).to.revertedWith("Only callable by the EnforcedTxGateway");
});
it("should revert, when sender is not EOA", async () => {
await expect(
queue.connect(gateway).appendEnforcedTransaction(queue.address, constants.AddressZero, 0, 0, "0x")
queue.connect(gateway).appendEnforcedTransaction(queue.getAddress(), ZeroAddress, 0, 0, "0x")
).to.revertedWith("only EOA");
});
it("should revert, when exceed maxGasLimit", async () => {
await expect(
queue.connect(gateway).appendEnforcedTransaction(signer.address, constants.AddressZero, 0, 10000001, "0x")
queue.connect(gateway).appendEnforcedTransaction(signer.address, ZeroAddress, 0, 10000001, "0x")
).to.revertedWith("Gas limit must not exceed maxGasLimit");
});
it("should revert, when below intrinsic gas", async () => {
await expect(
queue.connect(gateway).appendEnforcedTransaction(signer.address, constants.AddressZero, 0, 0, "0x")
queue.connect(gateway).appendEnforcedTransaction(signer.address, ZeroAddress, 0, 0, "0x")
).to.revertedWith("Insufficient gas limit, must be above intrinsic gas");
});
it("should succeed", async () => {
expect(await queue.nextCrossDomainMessageIndex()).to.eq(constants.Zero);
expect(await queue.nextCrossDomainMessageIndex()).to.eq(0n);
const sender = signer.address;
const hash = await queue.computeTransactionHash(sender, 0, 200, signer.address, 100000, "0x01");
await expect(
@@ -229,7 +217,7 @@ describe("L1MessageQueue", async () => {
)
.to.emit(queue, "QueueTransaction")
.withArgs(sender, signer.address, 200, 0, 100000, "0x01");
expect(await queue.nextCrossDomainMessageIndex()).to.eq(constants.One);
expect(await queue.nextCrossDomainMessageIndex()).to.eq(1n);
expect(await queue.getCrossDomainMessage(0)).to.eq(hash);
});
});
@@ -254,7 +242,7 @@ describe("L1MessageQueue", async () => {
it("should succeed", async () => {
// append 512 messages
for (let i = 0; i < 256 * 2; i++) {
await queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 1000000, "0x");
await queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 1000000, "0x");
}
// pop 50 messages with no skip
@@ -292,17 +280,12 @@ describe("L1MessageQueue", async () => {
}
// pop 256 messages with random skip
const bitmap = BigNumber.from("0x496525059c3f33758d17030403e45afe067b8a0ae1317cda0487fd2932cbea1a");
const bitmap = toBigInt("0x496525059c3f33758d17030403e45afe067b8a0ae1317cda0487fd2932cbea1a");
const tx = await queue.connect(scrollChain).popCrossDomainMessage(80, 256, bitmap);
await expect(tx).to.emit(queue, "DequeueTransaction").withArgs(80, 256, bitmap);
console.log("gas used:", (await tx.wait()).gasUsed.toString());
console.log("gas used:", (await tx.wait())!.gasUsed.toString());
for (let i = 80; i < 80 + 256; i++) {
expect(await queue.isMessageSkipped(i)).to.eq(
bitmap
.shr(i - 80)
.and(1)
.eq(1)
);
expect(await queue.isMessageSkipped(i)).to.eq(((bitmap >> toBigInt(i - 80)) & 1n) === 1n);
expect(await queue.isMessageDropped(i)).to.eq(false);
}
});
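
The skip-bitmap assertions above replace BigNumber bit operations with native bigint operators; a compact sketch of the equivalence (the isSkipped helper is illustrative):

import { toBigInt } from "ethers";

// v5: bitmap.shr(i).and(1).eq(1)  ->  v6: ((bitmap >> toBigInt(i)) & 1n) === 1n
function isSkipped(bitmap: bigint, i: number): boolean {
  return ((bitmap >> toBigInt(i)) & 1n) === 1n;
}

// Example: bits 0 and 2 set, so indexes 0 and 2 are skipped.
const bitmap = 0b101n;
console.log(isSkipped(bitmap, 0), isSkipped(bitmap, 1), isSkipped(bitmap, 2)); // true false true
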
@@ -314,39 +297,39 @@ describe("L1MessageQueue", async () => {
it.skip(`should succeed on random tests, pop three times each with ${count1} ${count2} ${count3} msgs`, async () => {
// append count1 + count2 + count3 messages
for (let i = 0; i < count1 + count2 + count3; i++) {
await queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 1000000, "0x");
await queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 1000000, "0x");
}
// first pop `count1` messages
const bitmap1 = BigNumber.from(randomBytes(32));
const bitmap1 = toBigInt(randomBytes(32));
let tx = await queue.connect(scrollChain).popCrossDomainMessage(0, count1, bitmap1);
await expect(tx)
.to.emit(queue, "DequeueTransaction")
.withArgs(0, count1, bitmap1.and(constants.One.shl(count1).sub(1)));
.withArgs(0, count1, bitmap1 & ((1n << toBigInt(count1)) - 1n));
for (let i = 0; i < count1; i++) {
expect(await queue.isMessageSkipped(i)).to.eq(bitmap1.shr(i).and(1).eq(1));
expect(await queue.isMessageSkipped(i)).to.eq(((bitmap1 >> toBigInt(i)) & 1n) === 1n);
expect(await queue.isMessageDropped(i)).to.eq(false);
}
// then pop `count2` messages
const bitmap2 = BigNumber.from(randomBytes(32));
const bitmap2 = toBigInt(randomBytes(32));
tx = await queue.connect(scrollChain).popCrossDomainMessage(count1, count2, bitmap2);
await expect(tx)
.to.emit(queue, "DequeueTransaction")
.withArgs(count1, count2, bitmap2.and(constants.One.shl(count2).sub(1)));
.withArgs(count1, count2, bitmap2 & ((1n << toBigInt(count2)) - 1n));
for (let i = 0; i < count2; i++) {
expect(await queue.isMessageSkipped(i + count1)).to.eq(bitmap2.shr(i).and(1).eq(1));
expect(await queue.isMessageSkipped(i + count1)).to.eq(((bitmap2 >> toBigInt(i)) & 1n) === 1n);
expect(await queue.isMessageDropped(i + count1)).to.eq(false);
}
// last pop `count3` messages
const bitmap3 = BigNumber.from(randomBytes(32));
const bitmap3 = toBigInt(randomBytes(32));
tx = await queue.connect(scrollChain).popCrossDomainMessage(count1 + count2, count3, bitmap3);
await expect(tx)
.to.emit(queue, "DequeueTransaction")
.withArgs(count1 + count2, count3, bitmap3.and(constants.One.shl(count3).sub(1)));
.withArgs(count1 + count2, count3, bitmap3 & ((1n << toBigInt(count3)) - 1n));
for (let i = 0; i < count3; i++) {
expect(await queue.isMessageSkipped(i + count1 + count2)).to.eq(bitmap3.shr(i).and(1).eq(1));
expect(await queue.isMessageSkipped(i + count1 + count2)).to.eq(((bitmap3 >> toBigInt(i)) & 1n) === 1n);
expect(await queue.isMessageDropped(i + count1 + count2)).to.eq(false);
}
});
@@ -365,7 +348,7 @@ describe("L1MessageQueue", async () => {
it("should revert, when drop non-skipped message", async () => {
// append 10 messages
for (let i = 0; i < 10; i++) {
await queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 1000000, "0x");
await queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 1000000, "0x");
}
// pop 5 messages with no skip
await expect(queue.connect(scrollChain).popCrossDomainMessage(0, 5, 0))
@@ -390,7 +373,7 @@ describe("L1MessageQueue", async () => {
it("should succeed", async () => {
// append 10 messages
for (let i = 0; i < 10; i++) {
await queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 1000000, "0x");
await queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 1000000, "0x");
}
// pop 10 messages, all skipped
await expect(queue.connect(scrollChain).popCrossDomainMessage(0, 10, 0x3ff))


@@ -1,8 +1,9 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { expect } from "chai";
import { concat } from "ethers/lib/utils";
import { concat } from "ethers";
import { ethers } from "hardhat";
import { MockPatriciaMerkleTrieVerifier } from "../typechain";
interface ITestConfig {
@@ -121,7 +122,6 @@ describe("PatriciaMerkleTrieVerifier", async () => {
const MockPatriciaMerkleTrieVerifier = await ethers.getContractFactory("MockPatriciaMerkleTrieVerifier", deployer);
verifier = await MockPatriciaMerkleTrieVerifier.deploy();
await verifier.deployed();
});
for (const test of testcases) {


@@ -2,9 +2,9 @@
/* eslint-disable node/no-unpublished-import */
import { expect } from "chai";
import { randomBytes } from "crypto";
import { BigNumber, Contract } from "ethers";
import { ethers } from "hardhat";
import { Contract, toBigInt } from "ethers";
import fs from "fs";
import { ethers } from "hardhat";
import PoseidonWithoutDomain from "circomlib/src/poseidon_gencontract";
import { generateABI, createCode } from "../scripts/poseidon";
@@ -23,12 +23,10 @@ describe("PoseidonHash.spec", async () => {
PoseidonWithoutDomain.createCode(2),
deployer
);
poseidonCircom = await PoseidonWithoutDomainFactory.deploy();
await poseidonCircom.deployed();
poseidonCircom = (await PoseidonWithoutDomainFactory.deploy()) as Contract;
const PoseidonWithDomainFactory = new ethers.ContractFactory(generateABI(2), createCode(2), deployer);
poseidon = await PoseidonWithDomainFactory.deploy();
await poseidon.deployed();
poseidon = (await PoseidonWithDomainFactory.deploy()) as Contract;
});
it("should succeed on zero inputs", async () => {
@@ -40,8 +38,8 @@ describe("PoseidonHash.spec", async () => {
it("should succeed on random inputs", async () => {
for (let bytes = 1; bytes <= 32; ++bytes) {
for (let i = 0; i < 5; ++i) {
const a = randomBytes(bytes);
const b = randomBytes(bytes);
const a = toBigInt(randomBytes(bytes));
const b = toBigInt(randomBytes(bytes));
expect(await poseidonCircom["poseidon(uint256[2])"]([a, b])).to.eq(
await poseidon["poseidon(uint256[2],uint256)"]([a, b], 0)
);
@@ -58,31 +56,20 @@ describe("PoseidonHash.spec", async () => {
// test against scroll's go implementation.
context("domain = nonzero", async () => {
let poseidonCircom: Contract;
let poseidon: Contract;
beforeEach(async () => {
const [deployer] = await ethers.getSigners();
const PoseidonWithoutDomainFactory = new ethers.ContractFactory(
PoseidonWithoutDomain.generateABI(2),
PoseidonWithoutDomain.createCode(2),
deployer
);
poseidonCircom = await PoseidonWithoutDomainFactory.deploy();
await poseidonCircom.deployed();
const PoseidonWithDomainFactory = new ethers.ContractFactory(generateABI(2), createCode(2), deployer);
poseidon = await PoseidonWithDomainFactory.deploy();
await poseidon.deployed();
poseidon = (await PoseidonWithDomainFactory.deploy()) as Contract;
});
it("should succeed on zero inputs", async () => {
expect(await poseidon["poseidon(uint256[2],uint256)"]([0, 0], 6)).to.eq(
BigNumber.from("17848312925884193353134534408113064827548730776291701343555436351962284922129")
toBigInt("17848312925884193353134534408113064827548730776291701343555436351962284922129")
);
expect(await poseidon["poseidon(uint256[2],uint256)"]([0, 0], 7)).to.eq(
BigNumber.from("20994231331856095272861976502721128670019193481895476667943874333621461724676")
toBigInt("20994231331856095272861976502721128670019193481895476667943874333621461724676")
);
});
@@ -90,7 +77,7 @@ describe("PoseidonHash.spec", async () => {
const lines = String(fs.readFileSync("./integration-test/testdata/poseidon_hash_with_domain.data")).split("\n");
for (const line of lines) {
const [domain, a, b, hash] = line.split(" ");
expect(await poseidon["poseidon(uint256[2],uint256)"]([a, b], domain)).to.eq(BigNumber.from(hash));
expect(await poseidon["poseidon(uint256[2],uint256)"]([a, b], domain)).to.eq(toBigInt(hash));
}
});
});


@@ -0,0 +1,162 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { ZeroAddress } from "ethers";
import { ethers } from "hardhat";
import { ScrollChain, L1MessageQueue } from "../typechain";
import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers";
import { randomBytes } from "crypto";
import { expect } from "chai";
describe("ScrollChain.blob", async () => {
let deployer: HardhatEthersSigner;
let signer: HardhatEthersSigner;
let queue: L1MessageQueue;
let chain: ScrollChain;
beforeEach(async () => {
[deployer, signer] = await ethers.getSigners();
const EmptyContract = await ethers.getContractFactory("EmptyContract", deployer);
const empty = await EmptyContract.deploy();
const ProxyAdmin = await ethers.getContractFactory("ProxyAdmin", deployer);
const admin = await ProxyAdmin.deploy();
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
const queueProxy = await TransparentUpgradeableProxy.deploy(empty.getAddress(), admin.getAddress(), "0x");
const chainProxy = await TransparentUpgradeableProxy.deploy(empty.getAddress(), admin.getAddress(), "0x");
const L1MessageQueue = await ethers.getContractFactory("L1MessageQueue", deployer);
const queueImpl = await L1MessageQueue.deploy(deployer.address, chainProxy.getAddress(), deployer.address);
await admin.upgrade(queueProxy.getAddress(), queueImpl.getAddress());
const ScrollChain = await ethers.getContractFactory("ScrollChain", deployer);
const chainImpl = await ScrollChain.deploy(0, queueProxy.getAddress(), deployer.address);
await admin.upgrade(chainProxy.getAddress(), chainImpl.getAddress());
queue = await ethers.getContractAt("L1MessageQueue", await queueProxy.getAddress(), deployer);
chain = await ethers.getContractAt("ScrollChain", await chainProxy.getAddress(), deployer);
await chain.initialize(queue.getAddress(), ZeroAddress, 100);
await chain.addSequencer(deployer.address);
await chain.addProver(deployer.address);
await queue.initialize(deployer.address, chain.getAddress(), deployer.address, deployer.address, 10000000);
});
context("commit batch", async () => {
let batchHeader0: Uint8Array;
beforeEach(async () => {
// import 10 L1 messages
for (let i = 0; i < 10; i++) {
queue.appendCrossDomainMessage(deployer.address, 1000000, "0x");
}
// import genesis batch first
batchHeader0 = new Uint8Array(89);
batchHeader0[25] = 1;
await chain.importGenesisBatch(batchHeader0, randomBytes(32));
});
it("should revert when caller is not sequencer", async () => {
await expect(chain.connect(signer).commitBatch(1, batchHeader0, [], "0x")).to.revertedWithCustomError(
chain,
"ErrorCallerIsNotSequencer"
);
});
it("should revert when batch is empty", async () => {
await expect(chain.commitBatch(1, batchHeader0, [], "0x")).to.revertedWithCustomError(chain, "ErrorBatchIsEmpty");
});
it("should revert when batch header length too small", async () => {
const header = new Uint8Array(120);
header[0] = 1;
await expect(chain.commitBatch(1, header, ["0x"], "0x")).to.revertedWithCustomError(
chain,
"ErrorBatchHeaderLengthTooSmall"
);
});
it("should revert when wrong bitmap length", async () => {
const header = new Uint8Array(122);
header[0] = 1;
await expect(chain.commitBatch(1, header, ["0x"], "0x")).to.revertedWithCustomError(
chain,
"ErrorIncorrectBitmapLength"
);
});
it("should revert when incorrect parent batch hash", async () => {
batchHeader0[25] = 2;
await expect(chain.commitBatch(1, batchHeader0, ["0x"], "0x")).to.revertedWithCustomError(
chain,
"ErrorIncorrectBatchHash"
);
batchHeader0[25] = 1;
});
it("should revert when ErrorInvalidBatchHeaderVersion", async () => {
const header = new Uint8Array(121);
header[0] = 2;
await expect(chain.commitBatch(1, header, ["0x"], "0x")).to.revertedWithCustomError(
chain,
"ErrorInvalidBatchHeaderVersion"
);
await expect(chain.commitBatch(2, batchHeader0, ["0x"], "0x")).to.revertedWithCustomError(
chain,
"ErrorInvalidBatchHeaderVersion"
);
});
it("should revert when ErrorNoBlobFound", async () => {
await expect(chain.commitBatch(1, batchHeader0, ["0x"], "0x")).to.revertedWithCustomError(
chain,
"ErrorNoBlobFound"
);
});
/* Hardhat doesn't support EIP-4844 yet.
const makeTransaction = async (data: string, value: bigint, blobVersionedHashes: Array<string>) => {
const tx = new Transaction();
tx.type = 3;
tx.to = await chain.getAddress();
tx.data = data;
tx.nonce = await deployer.getNonce();
tx.gasLimit = 1000000;
tx.maxPriorityFeePerGas = (await ethers.provider.getFeeData()).maxPriorityFeePerGas;
tx.maxFeePerGas = (await ethers.provider.getFeeData()).maxFeePerGas;
tx.value = value;
tx.chainId = (await ethers.provider.getNetwork()).chainId;
tx.maxFeePerBlobGas = ethers.parseUnits("1", "gwei");
tx.blobVersionedHashes = blobVersionedHashes;
return tx;
};
it("should revert when ErrorFoundMultipleBlob", async () => {
const data = chain.interface.encodeFunctionData("commitBatch", [1, batchHeader0, ["0x"], "0x"]);
const tx = await makeTransaction(data, 0n, [ZeroHash, ZeroHash]);
const signature = await deployer.signMessage(tx.unsignedHash);
tx.signature = Signature.from(signature);
const r = await ethers.provider.broadcastTransaction(tx.serialized);
await expect(r).to.revertedWithCustomError(chain, "ErrorFoundMultipleBlob");
});
it("should revert when ErrorNoBlockInChunk", async () => {});
it("should revert when ErrorIncorrectChunkLength", async () => {});
it("should revert when ErrorLastL1MessageSkipped", async () => {});
it("should revert when ErrorNumTxsLessThanNumL1Msgs", async () => {});
it("should revert when ErrorTooManyTxsInOneChunk", async () => {});
it("should revert when ErrorIncorrectBitmapLength", async () => {});
it("should succeed", async () => {});
*/
});
});


@@ -1,8 +1,8 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { concat } from "ethers/lib/utils";
import { constants } from "ethers";
import { ZeroAddress, concat, getBytes } from "ethers";
import { ethers } from "hardhat";
import { ScrollChain, L1MessageQueue } from "../typechain";
describe("ScrollChain", async () => {
@@ -14,40 +14,28 @@ describe("ScrollChain", async () => {
const EmptyContract = await ethers.getContractFactory("EmptyContract", deployer);
const empty = await EmptyContract.deploy();
await empty.deployed();
const ProxyAdmin = await ethers.getContractFactory("ProxyAdmin", deployer);
const admin = await ProxyAdmin.deploy();
await admin.deployed();
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
const queueProxy = await TransparentUpgradeableProxy.deploy(empty.address, admin.address, "0x");
await queueProxy.deployed();
const chainProxy = await TransparentUpgradeableProxy.deploy(empty.address, admin.address, "0x");
await chainProxy.deployed();
const queueProxy = await TransparentUpgradeableProxy.deploy(empty.getAddress(), admin.getAddress(), "0x");
const chainProxy = await TransparentUpgradeableProxy.deploy(empty.getAddress(), admin.getAddress(), "0x");
const L1MessageQueue = await ethers.getContractFactory("L1MessageQueue", deployer);
const queueImpl = await L1MessageQueue.deploy(constants.AddressZero, chainProxy.address, deployer.address);
await queueImpl.deployed();
await admin.upgrade(queueProxy.address, queueImpl.address);
const queueImpl = await L1MessageQueue.deploy(ZeroAddress, chainProxy.getAddress(), deployer.address);
await admin.upgrade(queueProxy.getAddress(), queueImpl.getAddress());
const ScrollChain = await ethers.getContractFactory("ScrollChain", deployer);
const chainImpl = await ScrollChain.deploy(0, queueProxy.address, deployer.address);
await chainImpl.deployed();
await admin.upgrade(chainProxy.address, chainImpl.address);
const chainImpl = await ScrollChain.deploy(0, queueProxy.getAddress(), deployer.address);
await admin.upgrade(chainProxy.getAddress(), chainImpl.getAddress());
queue = await ethers.getContractAt("L1MessageQueue", queueProxy.address, deployer);
chain = await ethers.getContractAt("ScrollChain", chainProxy.address, deployer);
queue = await ethers.getContractAt("L1MessageQueue", await queueProxy.getAddress(), deployer);
chain = await ethers.getContractAt("ScrollChain", await chainProxy.getAddress(), deployer);
await chain.initialize(queue.address, constants.AddressZero, 100);
await chain.initialize(queue.getAddress(), ZeroAddress, 100);
await chain.addSequencer(deployer.address);
await queue.initialize(
constants.AddressZero,
chain.address,
constants.AddressZero,
constants.AddressZero,
10000000
);
await queue.initialize(ZeroAddress, chain.getAddress(), ZeroAddress, ZeroAddress, 10000000);
});
// @note skip these benchmark tests
@@ -82,12 +70,12 @@ describe("ScrollChain", async () => {
for (let i = 0; i < numChunks; i++) {
const txsInChunk: Array<Uint8Array> = [];
for (let j = 0; j < numBlocks; j++) {
txsInChunk.push(concat(txs));
txsInChunk.push(getBytes(concat(txs)));
}
chunks.push(concat([chunk, concat(txsInChunk)]));
chunks.push(getBytes(concat([chunk, concat(txsInChunk)])));
}
const estimateGas = await chain.estimateGas.commitBatch(0, batchHeader0, chunks, "0x");
const estimateGas = await chain.commitBatch.estimateGas(0, batchHeader0, chunks, "0x");
console.log(
`${numChunks}`,
`${numBlocks}`,


@@ -1,14 +1,15 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers";
import { expect } from "chai";
import { hexlify } from "ethers/lib/utils";
import { ethers } from "hardhat";
import { ZkEvmVerifierV1 } from "../typechain";
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
import { hexlify } from "ethers";
import fs from "fs";
import { ethers } from "hardhat";
import { ZkEvmVerifierV1 } from "../typechain";
describe("ZkEvmVerifierV1", async () => {
let deployer: SignerWithAddress;
let deployer: HardhatEthersSigner;
let zkEvmVerifier: ZkEvmVerifierV1;
@@ -20,8 +21,7 @@ describe("ZkEvmVerifierV1", async () => {
const receipt = await tx.wait();
const ZkEvmVerifierV1 = await ethers.getContractFactory("ZkEvmVerifierV1", deployer);
zkEvmVerifier = await ZkEvmVerifierV1.deploy(receipt.contractAddress);
await zkEvmVerifier.deployed();
zkEvmVerifier = await ZkEvmVerifierV1.deploy(receipt!.contractAddress!);
});
it("should succeed", async () => {
@@ -37,7 +37,7 @@ describe("ZkEvmVerifierV1", async () => {
// verify ok
await zkEvmVerifier.verify(proof, publicInputHash);
console.log("Gas Usage:", (await zkEvmVerifier.estimateGas.verify(proof, publicInputHash)).toString());
console.log("Gas Usage:", (await zkEvmVerifier.verify.estimateGas(proof, publicInputHash)).toString());
// verify failed
await expect(zkEvmVerifier.verify(proof, publicInputHash.reverse())).to.reverted;
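
The gas-estimation call style also changes in ethers v6: contract.estimateGas.method(args) becomes contract.method.estimateGas(args). A minimal sketch under that assumption (the loosely typed verifier handle is illustrative):

// v5: await zkEvmVerifier.estimateGas.verify(proof, publicInputHash)
// v6: await zkEvmVerifier.verify.estimateGas(proof, publicInputHash)
async function logVerifyGas(zkEvmVerifier: any, proof: string, publicInputHash: string) {
  const gas = await zkEvmVerifier.verify.estimateGas(proof, publicInputHash); // bigint in v6
  console.log("Gas Usage:", gas.toString());
}
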


@@ -1,11 +1,11 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { expect } from "chai";
import { concat } from "ethers/lib/utils";
import { ethers } from "hardhat";
import { MockZkTrieVerifier } from "../typechain";
import { generateABI, createCode } from "../scripts/poseidon";
import { MockZkTrieVerifier } from "../typechain";
import { concat } from "ethers";
const chars = "0123456789abcdef";
@@ -273,13 +273,10 @@ describe("ZkTrieVerifier", async () => {
const [deployer] = await ethers.getSigners();
const PoseidonHashWithDomainFactory = new ethers.ContractFactory(generateABI(2), createCode(2), deployer);
const poseidon = await PoseidonHashWithDomainFactory.deploy();
await poseidon.deployed();
const MockZkTrieVerifier = await ethers.getContractFactory("MockZkTrieVerifier", deployer);
verifier = await MockZkTrieVerifier.deploy(poseidon.address);
await verifier.deployed();
verifier = await MockZkTrieVerifier.deploy(poseidon.getAddress());
});
const shouldRevert = async (test: ITestConfig, reason: string, extra?: string) => {
@@ -456,7 +453,7 @@ describe("ZkTrieVerifier", async () => {
it("should revert, when InvalidAccountKeyPreimage", async () => {
const test = testcases[0];
const index = test.accountProof.length - 2;
const correct = test.accountProof[index];
const correct = test.accountProof[index].slice();
for (const p of [398, 438]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
@@ -471,7 +468,7 @@ describe("ZkTrieVerifier", async () => {
it("should revert, when InvalidProofMagicBytes", async () => {
const test = testcases[0];
let index = test.accountProof.length - 1;
let correct = test.accountProof[index];
let correct = test.accountProof[index].slice();
for (const p of [2, 32, 91]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
@@ -483,7 +480,7 @@ describe("ZkTrieVerifier", async () => {
}
index = test.storageProof.length - 1;
correct = test.storageProof[index];
correct = test.storageProof[index].slice();
for (const p of [2, 32, 91]) {
const v = correct[p];
for (let b = 0; b < 3; ++b) {
@@ -497,13 +494,14 @@ describe("ZkTrieVerifier", async () => {
it("should revert, when InvalidAccountLeafNodeHash", async () => {
const test = testcases[0];
const correct = test.storageProof.slice();
test.storageProof = [
"0x05",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
];
const correct = test.accountProof[test.accountProof.length - 2];
// change nonce
test.accountProof[test.accountProof.length - 2] = correct.replace(
"0x0420e9fb498ff9c35246d527da24aa1710d2cc9b055ecf9a95a8a2a11d3d836cdf050800000",
"0x0420e9fb498ff9c35246d527da24aa1710d2cc9b055ecf9a95a8a2a11d3d836cdf050800001"
);
await shouldRevert(test, "InvalidAccountLeafNodeHash");
test.storageProof = correct;
test.accountProof[test.accountProof.length - 2] = correct;
});
it("should revert, when InvalidStorageLeafNodeType", async () => {


@@ -5,7 +5,7 @@
"license": "MIT",
"scripts": {
"test:hardhat": "npx hardhat test",
"test:forge": "forge test -vvv",
"test:forge": "forge test -vvv --evm-version cancun",
"test": "yarn test:hardhat && yarn test:forge",
"solhint": "./node_modules/.bin/solhint -f table 'src/**/*.sol'",
"lint:sol": "./node_modules/.bin/prettier --write 'src/**/*.sol'",
@@ -16,44 +16,47 @@
"prepare": "cd .. && husky install contracts/.husky"
},
"devDependencies": {
"@nomiclabs/hardhat-ethers": "^2.0.0",
"@nomiclabs/hardhat-etherscan": "^3.0.0",
"@nomiclabs/hardhat-waffle": "^2.0.0",
"@nomicfoundation/hardhat-chai-matchers": "^2.0.6",
"@nomicfoundation/hardhat-ethers": "^3.0.5",
"@nomicfoundation/hardhat-verify": "^2.0.5",
"@primitivefi/hardhat-dodoc": "^0.2.3",
"@typechain/ethers-v5": "^7.0.1",
"@typechain/hardhat": "^2.3.0",
"@typechain/ethers-v6": "^0.5.1",
"@typechain/hardhat": "^9.1.0",
"@types/chai": "^4.2.21",
"@types/edit-json-file": "^1.7.0",
"@types/mocha": "^9.0.0",
"@types/node": "^12.0.0",
"@typescript-eslint/eslint-plugin": "^4.29.1",
"@typescript-eslint/parser": "^4.29.1",
"@types/node": "^20.11.27",
"@typescript-eslint/eslint-plugin": "^7.2.0",
"@typescript-eslint/parser": "^7.2.0",
"chai": "^4.2.0",
"circom": "^0.5.46",
"circomlib": "^0.5.0",
"dotenv": "^10.0.0",
"edit-json-file": "^1.7.0",
"eslint": "^7.29.0",
"eslint": "^8.57.0",
"eslint-config-prettier": "^8.3.0",
"eslint-config-standard": "^16.0.3",
"eslint-config-standard": "^17.1.0",
"eslint-plugin-import": "^2.23.4",
"eslint-plugin-n": "^16.6.2",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^3.4.0",
"eslint-plugin-promise": "^5.1.0",
"eslint-plugin-promise": "^6.1.1",
"ethereum-waffle": "^3.0.0",
"ethers": "^5.0.0",
"hardhat": "^2.9.3",
"ethers": "^6.11.1",
"hardhat": "^2.22.0",
"hardhat-gas-reporter": "^1.0.4",
"husky": "^8.0.1",
"lint-staged": "^13.0.3",
"lodash": "^4.17.21",
"prettier": "^2.3.2",
"prettier-plugin-solidity": "^1.0.0-beta.13",
"solhint": "^3.3.6",
"solidity-coverage": "^0.7.16",
"solidity-coverage": "^0.8.11",
"squirrelly": "8.0.8",
"toml": "^3.0.0",
"ts-node": "^10.1.0",
"typechain": "^5.1.2",
"typescript": "^4.5.2"
"typechain": "^8.3.2",
"typescript": "^5.4.2"
},
"dependencies": {
"@openzeppelin/contracts": "^v4.9.3",
@@ -63,5 +66,8 @@
"*.{js,ts}": "npx eslint --cache --fix",
"!(docs/apis/*).md": "prettier --ignore-unknown --write",
"*.sol": "prettier --ignore-unknown --write"
},
"engines": {
"node": ">=10.4.0"
}
}


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
// solhint-disable no-console


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
// solhint-disable no-console
@@ -92,7 +92,11 @@ contract DeployL1BridgeContracts is Script {
}
function deployMultipleVersionRollupVerifier() internal {
rollupVerifier = new MultipleVersionRollupVerifier(address(zkEvmVerifierV1));
uint256[] memory _versions = new uint256[](1);
address[] memory _verifiers = new address[](1);
_versions[0] = 0;
_verifiers[0] = address(zkEvmVerifierV1);
rollupVerifier = new MultipleVersionRollupVerifier(L1_SCROLL_CHAIN_PROXY_ADDR, _versions, _verifiers);
logAddress("L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR", address(rollupVerifier));
}


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
// solhint-disable no-console


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
// solhint-disable no-console


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
// solhint-disable no-console


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";

View File

@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {Script} from "forge-std/Script.sol";
@@ -96,9 +96,6 @@ contract InitializeL1BridgeContracts is Script {
ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).addSequencer(L1_COMMIT_SENDER_ADDRESS);
ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).addProver(L1_FINALIZE_SENDER_ADDRESS);
// initialize MultipleVersionRollupVerifier
MultipleVersionRollupVerifier(L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR).initialize(L1_SCROLL_CHAIN_PROXY_ADDR);
// initialize L2GasPriceOracle
L2GasPriceOracle(L2_GAS_PRICE_ORACLE_PROXY_ADDR).initialize(
21000, // _txGas


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {Script} from "forge-std/Script.sol";


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {Script} from "forge-std/Script.sol";


@@ -1,5 +1,5 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {Script} from "forge-std/Script.sol";


@@ -1,5 +1,5 @@
/* eslint-disable node/no-missing-import */
import { ethers } from "ethers";
import { ethers, keccak256 } from "ethers";
import Contract from "circomlib/src/evmasm";
import * as constants from "circomlib/src/poseidon_constants";
@@ -90,10 +90,10 @@ export function createCode(nInputs: number) {
C.calldataload();
C.div();
C.dup(0);
C.push(ethers.utils.keccak256(ethers.utils.toUtf8Bytes(`poseidon(uint256[${nInputs}],uint256)`)).slice(0, 10)); // poseidon(uint256[n],uint256)
C.push(keccak256(ethers.toUtf8Bytes(`poseidon(uint256[${nInputs}],uint256)`)).slice(0, 10)); // poseidon(uint256[n],uint256)
C.eq();
C.swap(1);
C.push(ethers.utils.keccak256(ethers.utils.toUtf8Bytes(`poseidon(bytes32[${nInputs}],bytes32)`)).slice(0, 10)); // poseidon(bytes32[n],bytes32)
C.push(keccak256(ethers.toUtf8Bytes(`poseidon(bytes32[${nInputs}],bytes32)`)).slice(0, 10)); // poseidon(bytes32[n],bytes32)
C.eq();
C.or();
C.jmpi("start");
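
The selector computation above moves from ethers.utils.keccak256 and ethers.utils.toUtf8Bytes to the v6 top-level exports; a tiny sketch of deriving a 4-byte selector this way (the selector helper is illustrative):

import { keccak256, toUtf8Bytes } from "ethers";

// First 4 bytes (10 hex characters including "0x") of keccak256 of the canonical signature.
function selector(signature: string): string {
  return keccak256(toUtf8Bytes(signature)).slice(0, 10);
}

console.log(selector("poseidon(uint256[2],uint256)"));
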


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {TimelockController} from "@openzeppelin/contracts/governance/TimelockController.sol";
import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol";


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.16;
pragma solidity ^0.8.24;
import {IScrollMessenger} from "../libraries/IScrollMessenger.sol";


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {IScrollChain} from "./rollup/IScrollChain.sol";
import {IL1MessageQueue} from "./rollup/IL1MessageQueue.sol";


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
import {ECDSAUpgradeable} from "@openzeppelin/contracts-upgradeable/utils/cryptography/ECDSAUpgradeable.sol";


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.16;
pragma solidity ^0.8.24;
/// @title The interface for the ERC1155 cross chain gateway on layer 1.
interface IL1ERC1155Gateway {


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.16;
pragma solidity ^0.8.24;
interface IL1ERC20Gateway {
/**********


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.16;
pragma solidity ^0.8.24;
/// @title The interface for the ERC721 cross chain gateway on layer 1.
interface IL1ERC721Gateway {


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.16;
pragma solidity ^0.8.24;
interface IL1ETHGateway {
/**********


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.16;
pragma solidity ^0.8.24;
import {IL1ETHGateway} from "./IL1ETHGateway.sol";
import {IL1ERC20Gateway} from "./IL1ERC20Gateway.sol";


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {IERC20Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/IERC20Upgradeable.sol";
import {SafeERC20Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/utils/SafeERC20Upgradeable.sol";


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {IERC1155Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC1155/IERC1155Upgradeable.sol";
import {ERC1155HolderUpgradeable, ERC1155ReceiverUpgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC1155/utils/ERC1155HolderUpgradeable.sol";


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.16;
pragma solidity ^0.8.24;
import {IERC20Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/IERC20Upgradeable.sol";
import {SafeERC20Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/utils/SafeERC20Upgradeable.sol";


@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT
pragma solidity =0.8.16;
pragma solidity =0.8.24;
import {IERC721Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC721/IERC721Upgradeable.sol";
import {ERC721HolderUpgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC721/utils/ERC721HolderUpgradeable.sol";
