Mirror of https://github.com/scroll-tech/scroll.git (synced 2026-01-12 15:38:18 -05:00)

Compare commits: feat/optim ... v4.3.80 (8 commits)
Commits in this comparison:
- ed0e0e4c18
- d203033e13
- 7d45926687
- 5362e28f74
- e8eb7ff8fd
- b01b5819da
- cb09024821
- 8bd4277c13
.github/workflows/common.yml (vendored), 4 changes

@@ -85,9 +85,9 @@ jobs:
   - name: Install Solc
     uses: supplypike/setup-bin@v3
     with:
-      uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
+      uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.24/solc-static-linux'
       name: 'solc'
-      version: '0.8.16'
+      version: '0.8.24'
   - name: Install Geth Tools
     uses: gacts/install-geth-tools@v1
   - name: Build prerequisites
.github/workflows/contracts.yml (vendored), 14 changes

@@ -43,10 +43,10 @@ jobs:
   - name: Setup LCOV
     uses: hrishikesh-kadam/setup-lcov@v1

-  - name: Install Node.js 14
+  - name: Install Node.js 18
     uses: actions/setup-node@v2
     with:
-      node-version: '14'
+      node-version: '18'

   - name: Get yarn cache directory path
     id: yarn-cache-dir-path

@@ -73,13 +73,13 @@ jobs:
     run: yarn install

   - name: Compile with foundry
-    run: forge build
+    run: forge build --evm-version cancun

   - name: Run foundry tests
-    run: forge test -vvv
+    run: forge test --evm-version cancun -vvv

   - name: Run foundry coverage
-    run : forge coverage --report lcov
+    run : forge coverage --evm-version cancun --report lcov

   - name : Prune coverage
     run : lcov --rc branch_coverage=1 --remove ./lcov.info -o ./lcov.info.pruned 'src/mocks/*' 'src/test/*' 'scripts/*' 'node_modules/*' 'lib/*'

@@ -102,10 +102,10 @@ jobs:
     with:
       submodules: recursive

-  - name: Install Node.js 14
+  - name: Install Node.js 18
     uses: actions/setup-node@v2
     with:
-      node-version: '14'
+      node-version: '18'

   - name: Get yarn cache directory path
     id: yarn-cache-dir-path
.github/workflows/coordinator.yml (vendored), 4 changes

@@ -101,9 +101,9 @@ jobs:
   - name: Install Solc
     uses: supplypike/setup-bin@v3
     with:
-      uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
+      uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.24/solc-static-linux'
       name: 'solc'
-      version: '0.8.16'
+      version: '0.8.24'
   - name: Install Geth Tools
     uses: gacts/install-geth-tools@v1
   - name: Build prerequisites
.github/workflows/database.yml (vendored), 4 changes

@@ -78,9 +78,9 @@ jobs:
   - name: Install Solc
     uses: supplypike/setup-bin@v3
     with:
-      uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
+      uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.24/solc-static-linux'
       name: 'solc'
-      version: '0.8.16'
+      version: '0.8.24'
   - name: Install Geth Tools
     uses: gacts/install-geth-tools@v1
   - name: Build prerequisites
@@ -8,7 +8,7 @@ require (
     github.com/go-redis/redis/v8 v8.11.5
     github.com/pressly/goose/v3 v3.16.0
     github.com/prometheus/client_golang v1.16.0
-    github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935
+    github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e
     github.com/stretchr/testify v1.9.0
     github.com/urfave/cli/v2 v2.25.7
     golang.org/x/sync v0.6.0

@@ -60,6 +60,7 @@ require (
     github.com/holiman/uint256 v1.2.4 // indirect
     github.com/huin/goupnp v1.3.0 // indirect
     github.com/iden3/go-iden3-crypto v0.0.15 // indirect
+    github.com/jackc/pgx/v5 v5.5.4 // indirect
     github.com/jackpal/go-nat-pmp v1.0.2 // indirect
     github.com/jinzhu/inflection v1.0.0 // indirect
     github.com/jinzhu/now v1.1.5 // indirect
@@ -184,8 +184,8 @@ github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsI
 github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
 github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
 github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
-github.com/jackc/pgx/v5 v5.5.0 h1:NxstgwndsTRy7eq9/kqYc/BZh5w2hHJV86wjvO+1xPw=
-github.com/jackc/pgx/v5 v5.5.0/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA=
+github.com/jackc/pgx/v5 v5.5.4 h1:Xp2aQS8uXButQdnCMWNmvx6UysWQQC+u1EoizjguY+8=
+github.com/jackc/pgx/v5 v5.5.4/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A=
 github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
 github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
 github.com/jackpal/go-nat-pmp v1.0.2 h1:KzKSgb7qkJvOUTqYl9/Hg/me3pWgBmERKrTGD7BdWus=

@@ -311,8 +311,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
 github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
 github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935 h1:bHBt6sillaT4o/9RjxkVX8pWwvEmu37uWBw4XbCjfzY=
-github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935/go.mod h1:7Rz2bh9pn42rGuxjh51CG7HL9SKMG3ZugJkL3emdZx8=
+github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e h1:FcoK0rykAWI+5E7cQM6ALRLd5CmjBTHRvJztRBH2xeM=
+github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e/go.mod h1:7Rz2bh9pn42rGuxjh51CG7HL9SKMG3ZugJkL3emdZx8=
 github.com/scroll-tech/zktrie v0.7.1 h1:NrmZNjuBzsbrKePqdHDG+t2cXnimbtezPAFS0+L9ElE=
 github.com/scroll-tech/zktrie v0.7.1/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk=
 github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
@@ -19,7 +19,7 @@ CAPELLA_FORK_VERSION: 0x20000092
 MAX_WITHDRAWALS_PER_PAYLOAD: 16

 # Deneb
-DENEB_FORK_EPOCH: 1
+DENEB_FORK_EPOCH: 0
 DENEB_FORK_VERSION: 0x20000093

 # Time parameters
@@ -19,7 +19,7 @@ services:
   command:
     - testnet
     - generate-genesis
-    - --fork=capella
+    - --fork=deneb
    - --num-validators=64
    - --genesis-time-delay=3
    - --output-ssz=/data/consensus/genesis.ssz
@@ -15,7 +15,7 @@
   "archimedesBlock": 0,
   "shanghaiBlock": 0,
   "clique": {
-    "period": 3,
+    "period": 1,
     "epoch": 30000
   },
   "scroll": {
@@ -1,6 +1,7 @@
 package forks

 import (
+    "math"
     "math/big"
     "sort"

@@ -8,30 +9,48 @@ import (
 )

 // CollectSortedForkHeights returns a sorted set of block numbers that one or more forks are activated on
-func CollectSortedForkHeights(config *params.ChainConfig) ([]uint64, map[uint64]bool) {
-    forkHeightsMap := make(map[uint64]bool)
-    for _, fork := range []*big.Int{
-        config.HomesteadBlock,
-        config.DAOForkBlock,
-        config.EIP150Block,
-        config.EIP155Block,
-        config.EIP158Block,
-        config.ByzantiumBlock,
-        config.ConstantinopleBlock,
-        config.PetersburgBlock,
-        config.IstanbulBlock,
-        config.MuirGlacierBlock,
-        config.BerlinBlock,
-        config.LondonBlock,
-        config.ArrowGlacierBlock,
-        config.ArchimedesBlock,
-        config.ShanghaiBlock,
+func CollectSortedForkHeights(config *params.ChainConfig) ([]uint64, map[uint64]bool, map[string]uint64) {
+    type nameFork struct {
+        name  string
+        block *big.Int
+    }
+
+    forkHeightNameMap := make(map[uint64]string)
+
+    for _, fork := range []nameFork{
+        {name: "homestead", block: config.HomesteadBlock},
+        {name: "daoFork", block: config.DAOForkBlock},
+        {name: "eip150", block: config.EIP150Block},
+        {name: "eip155", block: config.EIP155Block},
+        {name: "eip158", block: config.EIP158Block},
+        {name: "byzantium", block: config.ByzantiumBlock},
+        {name: "constantinople", block: config.ConstantinopleBlock},
+        {name: "petersburg", block: config.PetersburgBlock},
+        {name: "istanbul", block: config.IstanbulBlock},
+        {name: "muirGlacier", block: config.MuirGlacierBlock},
+        {name: "berlin", block: config.BerlinBlock},
+        {name: "london", block: config.LondonBlock},
+        {name: "arrowGlacier", block: config.ArrowGlacierBlock},
+        {name: "archimedes", block: config.ArchimedesBlock},
+        {name: "shanghai", block: config.ShanghaiBlock},
+        {name: "bernoulli", block: config.BernoulliBlock},
+        {name: "curie", block: config.CurieBlock},
     } {
-        if fork == nil {
+        if fork.block == nil {
             continue
-        } else if height := fork.Uint64(); height != 0 {
-            forkHeightsMap[height] = true
         }
+        height := fork.block.Uint64()
+
+        // only keep latest fork for at each height, discard the rest
+        forkHeightNameMap[height] = fork.name
     }
+
+    forkHeightsMap := make(map[uint64]bool)
+    forkNameHeightMap := make(map[string]uint64)
+
+    for height, name := range forkHeightNameMap {
+        forkHeightsMap[height] = true
+        forkNameHeightMap[name] = height
+    }

     var forkHeights []uint64

@@ -41,7 +60,7 @@ func CollectSortedForkHeights(config *params.ChainConfig) ([]uint64, map[uint64]
     sort.Slice(forkHeights, func(i, j int) bool {
         return forkHeights[i] < forkHeights[j]
     })
-    return forkHeights, forkHeightsMap
+    return forkHeights, forkHeightsMap, forkNameHeightMap
 }

 // BlocksUntilFork returns the number of blocks until the next fork

@@ -54,3 +73,17 @@ func BlocksUntilFork(blockHeight uint64, forkHeights []uint64) uint64 {
     }
     return 0
 }
+
+// BlockRange returns the block range of the hard fork
+// Need ensure the forkHeights is incremental
+func BlockRange(currentForkHeight uint64, forkHeights []uint64) (from, to uint64) {
+    to = math.MaxInt64
+    for _, height := range forkHeights {
+        if currentForkHeight < height {
+            to = height
+            return
+        }
+        from = height
+    }
+    return
+}
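The new three-value return and the BlockRange helper are easiest to see together. The following is a minimal sketch, not taken from the diff: the import path for package forks is assumed (adjust to wherever it lives in this repo), and it assumes the scroll-tech/go-ethereum params package exposes the Archimedes/Bernoulli/Curie fork fields used above.

package main

import (
    "fmt"
    "math/big"

    "github.com/scroll-tech/go-ethereum/params"

    "scroll-tech/common/forks" // assumed import path for the package shown above
)

func main() {
    cfg := &params.ChainConfig{
        ArchimedesBlock: big.NewInt(0),
        ShanghaiBlock:   big.NewInt(3),
        BernoulliBlock:  big.NewInt(3), // same height as shanghai: only the latest fork name is kept
        CurieBlock:      big.NewInt(4),
    }

    heights, heightSet, nameToHeight := forks.CollectSortedForkHeights(cfg)
    fmt.Println(heights)               // [0 3 4]
    fmt.Println(heightSet[3])          // true
    fmt.Println(nameToHeight["curie"]) // 4

    // Block range governed by the fork activated at height 3: [3, 4).
    from, to := forks.BlockRange(3, heights)
    fmt.Println(from, to) // 3 4
}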
@@ -1,6 +1,7 @@
 package forks

 import (
+    "math"
     "math/big"
     "testing"

@@ -9,20 +10,27 @@ import (
 )

 func TestCollectSortedForkBlocks(t *testing.T) {
-    l, m := CollectSortedForkHeights(&params.ChainConfig{
-        EIP155Block:         big.NewInt(4),
-        EIP158Block:         big.NewInt(3),
-        ByzantiumBlock:      big.NewInt(3),
-        ConstantinopleBlock: big.NewInt(0),
+    l, m, n := CollectSortedForkHeights(&params.ChainConfig{
+        ArchimedesBlock: big.NewInt(0),
+        ShanghaiBlock:   big.NewInt(3),
+        BernoulliBlock:  big.NewInt(3),
+        CurieBlock:      big.NewInt(4),
     })
     require.Equal(t, l, []uint64{
         0,
         3,
         4,
     })
     require.Equal(t, map[uint64]bool{
         3: true,
         4: true,
         0: true,
     }, m)
+    require.Equal(t, map[string]uint64{
+        "archimedes": 0,
+        "bernoulli":  3,
+        "curie":      4,
+    }, n)
 }

 func TestBlocksUntilFork(t *testing.T) {

@@ -64,3 +72,71 @@ func TestBlocksUntilFork(t *testing.T) {
         })
     }
 }
+
+func TestBlockRange(t *testing.T) {
+    tests := []struct {
+        name         string
+        forkHeight   uint64
+        forkHeights  []uint64
+        expectedFrom uint64
+        expectedTo   uint64
+    }{
+        {
+            name:         "ToInfinite",
+            forkHeight:   300,
+            forkHeights:  []uint64{100, 200, 300},
+            expectedFrom: 300,
+            expectedTo:   math.MaxInt64,
+        },
+        {
+            name:         "To300",
+            forkHeight:   200,
+            forkHeights:  []uint64{100, 200, 300},
+            expectedFrom: 200,
+            expectedTo:   300,
+        },
+        {
+            name:         "To200",
+            forkHeight:   100,
+            forkHeights:  []uint64{100, 200, 300},
+            expectedFrom: 100,
+            expectedTo:   200,
+        },
+        {
+            name:         "To100",
+            forkHeight:   0,
+            forkHeights:  []uint64{100, 200, 300},
+            expectedFrom: 0,
+            expectedTo:   100,
+        },
+        {
+            name:         "To200-1",
+            forkHeight:   100,
+            forkHeights:  []uint64{100, 200},
+            expectedFrom: 100,
+            expectedTo:   200,
+        },
+        {
+            name:         "To2",
+            forkHeight:   1,
+            forkHeights:  []uint64{1, 2},
+            expectedFrom: 1,
+            expectedTo:   2,
+        },
+        {
+            name:         "ToInfinite-1",
+            forkHeight:   0,
+            forkHeights:  []uint64{0},
+            expectedFrom: 0,
+            expectedTo:   math.MaxInt64,
+        },
+    }
+
+    for _, test := range tests {
+        t.Run(test.name, func(t *testing.T) {
+            from, to := BlockRange(test.forkHeight, test.forkHeights)
+            require.Equal(t, test.expectedFrom, from)
+            require.Equal(t, test.expectedTo, to)
+        })
+    }
+}
@@ -16,10 +16,11 @@ require (
     github.com/modern-go/reflect2 v1.0.2
     github.com/orcaman/concurrent-map v1.0.0
     github.com/prometheus/client_golang v1.16.0
-    github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935
+    github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e
     github.com/stretchr/testify v1.9.0
     github.com/testcontainers/testcontainers-go v0.29.1
     github.com/testcontainers/testcontainers-go/modules/compose v0.29.1
     github.com/testcontainers/testcontainers-go/modules/postgres v0.29.1
     github.com/urfave/cli/v2 v2.25.7
     gorm.io/driver/postgres v1.5.0
     gorm.io/gorm v1.25.5

@@ -127,7 +128,7 @@ require (
     github.com/inconshreveable/mousetrap v1.1.0 // indirect
     github.com/jackc/pgpassfile v1.0.0 // indirect
     github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
-    github.com/jackc/pgx/v5 v5.5.0 // indirect
+    github.com/jackc/pgx/v5 v5.5.4 // indirect
     github.com/jackc/puddle/v2 v2.2.1 // indirect
     github.com/jackpal/go-nat-pmp v1.0.2 // indirect
     github.com/jinzhu/inflection v1.0.0 // indirect
@@ -382,8 +382,8 @@ github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5ey
 github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
 github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
 github.com/jackc/pgx/v5 v5.3.0/go.mod h1:t3JDKnCBlYIc0ewLF0Q7B8MXmoIaBOZj/ic7iHozM/8=
-github.com/jackc/pgx/v5 v5.5.0 h1:NxstgwndsTRy7eq9/kqYc/BZh5w2hHJV86wjvO+1xPw=
-github.com/jackc/pgx/v5 v5.5.0/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA=
+github.com/jackc/pgx/v5 v5.5.4 h1:Xp2aQS8uXButQdnCMWNmvx6UysWQQC+u1EoizjguY+8=
+github.com/jackc/pgx/v5 v5.5.4/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A=
 github.com/jackc/puddle/v2 v2.2.0/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
 github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
 github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=

@@ -614,8 +614,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
 github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
 github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935 h1:bHBt6sillaT4o/9RjxkVX8pWwvEmu37uWBw4XbCjfzY=
-github.com/scroll-tech/go-ethereum v1.10.14-0.20240314095130-4553f5f26935/go.mod h1:7Rz2bh9pn42rGuxjh51CG7HL9SKMG3ZugJkL3emdZx8=
+github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e h1:FcoK0rykAWI+5E7cQM6ALRLd5CmjBTHRvJztRBH2xeM=
+github.com/scroll-tech/go-ethereum v1.10.14-0.20240326144132-0f0cd99f7a2e/go.mod h1:7Rz2bh9pn42rGuxjh51CG7HL9SKMG3ZugJkL3emdZx8=
 github.com/scroll-tech/zktrie v0.7.1 h1:NrmZNjuBzsbrKePqdHDG+t2cXnimbtezPAFS0+L9ElE=
 github.com/scroll-tech/zktrie v0.7.1/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk=
 github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE=

@@ -682,6 +682,8 @@ github.com/testcontainers/testcontainers-go v0.29.1 h1:z8kxdFlovA2y97RWx98v/TQ+t
 github.com/testcontainers/testcontainers-go v0.29.1/go.mod h1:SnKnKQav8UcgtKqjp/AD8bE1MqZm+3TDb/B8crE3XnI=
 github.com/testcontainers/testcontainers-go/modules/compose v0.29.1 h1:47ipPM+s+ltCDOP3Sa1j95AkNb+z+WGiHLDbLU8ixuc=
 github.com/testcontainers/testcontainers-go/modules/compose v0.29.1/go.mod h1:Sqh+Ef2ESdbJQjTJl57UOkEHkOc7gXvQLg1b5xh6f1Y=
 github.com/testcontainers/testcontainers-go/modules/postgres v0.29.1 h1:hTn3MzhR9w4btwfzr/NborGCaeNZG0MPBpufeDj10KA=
 github.com/testcontainers/testcontainers-go/modules/postgres v0.29.1/go.mod h1:YsWyy+pHDgvGdi0axGOx6CGXWsE6eqSaApyd1FYYSSc=
 github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c=
 github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw=
 github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA=
common/libzkp/impl/Cargo.lock (generated, 1028 changes): file diff suppressed because it is too large.
@@ -8,26 +8,29 @@ edition = "2021"
 crate-type = ["cdylib"]

 [patch.crates-io]
 gobuild = { git = "https://github.com/scroll-tech/gobuild.git" }
 halo2curves = { git = "https://github.com/scroll-tech/halo2curves", branch = "v0.1.0" }
 ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
 ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
 ethers-signers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
 #ethers-etherscan = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
 #ethers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
 [patch."https://github.com/privacy-scaling-explorations/halo2.git"]
-halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }
+halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
 [patch."https://github.com/privacy-scaling-explorations/poseidon.git"]
 poseidon = { git = "https://github.com/scroll-tech/poseidon.git", branch = "scroll-dev-0220" }
 [patch."https://github.com/privacy-scaling-explorations/halo2wrong.git"]
 halo2wrong = { git = "https://github.com/scroll-tech/halo2wrong.git", branch = "halo2-ecc-snark-verifier-0323" }
 maingate = { git = "https://github.com/scroll-tech/halo2wrong", branch = "halo2-ecc-snark-verifier-0323" }
 [patch."https://github.com/privacy-scaling-explorations/halo2curves.git"]
 halo2curves = { git = "https://github.com/scroll-tech/halo2curves.git", branch = "0.3.1-derive-serde" }
 poseidon = { git = "https://github.com/scroll-tech/poseidon.git", branch = "main" }
 [patch."https://github.com/privacy-scaling-explorations/bls12_381"]
 bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/impl_scalar_field" }

 [dependencies]
-halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }
-prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.9.9", default-features = false, features = ["parallel_syn", "scroll", "shanghai", "strict-ccc"] }
+halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
+prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.10.0k", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }

 base64 = "0.13.0"
 env_logger = "0.9.0"
 libc = "0.2"
 log = "0.4"
-once_cell = "1.8.0"
+once_cell = "1.19"
 serde = "1.0"
 serde_derive = "1.0"
 serde_json = "1.0.66"
@@ -1 +1 @@
-nightly-2022-12-10
+nightly-2023-12-03
@@ -119,7 +119,7 @@ pub unsafe extern "C" fn gen_batch_proof(

     let chunk_hashes_proofs = chunk_hashes
         .into_iter()
-        .zip(chunk_proofs.into_iter())
+        .zip(chunk_proofs)
         .collect();

     let proof = PROVER
@@ -1,5 +1,3 @@
-#![feature(once_cell)]
-
 mod batch;
 mod chunk;
 mod types;
common/testcontainers/testcontainers.go (new file, 176 lines)

@@ -0,0 +1,176 @@
package testcontainers

import (
    "context"
    "fmt"
    "log"
    "time"

    "github.com/scroll-tech/go-ethereum/ethclient"
    "github.com/testcontainers/testcontainers-go"
    "github.com/testcontainers/testcontainers-go/modules/postgres"
    "github.com/testcontainers/testcontainers-go/wait"
)

// TestcontainerApps testcontainers struct
type TestcontainerApps struct {
    postgresContainer *postgres.PostgresContainer
    l1GethContainer   *testcontainers.DockerContainer
    l2GethContainer   *testcontainers.DockerContainer

    // common time stamp in nanoseconds.
    Timestamp int
}

// NewTestcontainerApps returns new instance of TestcontainerApps struct
func NewTestcontainerApps() *TestcontainerApps {
    timestamp := time.Now().Nanosecond()
    return &TestcontainerApps{
        Timestamp: timestamp,
    }
}

// StartPostgresContainer starts a postgres container
func (t *TestcontainerApps) StartPostgresContainer() error {
    if t.postgresContainer != nil && t.postgresContainer.IsRunning() {
        return nil
    }
    postgresContainer, err := postgres.RunContainer(context.Background(),
        testcontainers.WithImage("postgres"),
        postgres.WithDatabase("test_db"),
        postgres.WithPassword("123456"),
        testcontainers.WithWaitStrategy(
            wait.ForLog("database system is ready to accept connections").WithOccurrence(2).WithStartupTimeout(5*time.Second)),
    )
    if err != nil {
        log.Printf("failed to start postgres container: %s", err)
        return err
    }
    t.postgresContainer = postgresContainer
    return nil
}

// StartL1GethContainer starts a L1Geth container
func (t *TestcontainerApps) StartL1GethContainer() error {
    if t.l1GethContainer != nil && t.l1GethContainer.IsRunning() {
        return nil
    }
    req := testcontainers.ContainerRequest{
        Image:        "scroll_l1geth",
        ExposedPorts: []string{"8546/tcp", "8545/tcp"},
        WaitingFor:   wait.ForHTTP("/").WithPort("8545").WithStartupTimeout(100 * time.Second),
        Cmd:          []string{"--log.debug", "ANY"},
    }
    genericContainerReq := testcontainers.GenericContainerRequest{
        ContainerRequest: req,
        Started:          true,
    }
    container, err := testcontainers.GenericContainer(context.Background(), genericContainerReq)
    if err != nil {
        log.Printf("failed to start scroll_l1geth container: %s", err)
        return err
    }
    t.l1GethContainer, _ = container.(*testcontainers.DockerContainer)
    return nil
}

// StartL2GethContainer starts a L2Geth container
func (t *TestcontainerApps) StartL2GethContainer() error {
    if t.l2GethContainer != nil && t.l2GethContainer.IsRunning() {
        return nil
    }
    req := testcontainers.ContainerRequest{
        Image:        "scroll_l2geth",
        ExposedPorts: []string{"8546/tcp", "8545/tcp"},
        WaitingFor:   wait.ForHTTP("/").WithPort("8545").WithStartupTimeout(100 * time.Second),
    }
    genericContainerReq := testcontainers.GenericContainerRequest{
        ContainerRequest: req,
        Started:          true,
    }
    container, err := testcontainers.GenericContainer(context.Background(), genericContainerReq)
    if err != nil {
        log.Printf("failed to start scroll_l2geth container: %s", err)
        return err
    }
    t.l2GethContainer, _ = container.(*testcontainers.DockerContainer)
    return nil
}

// GetDBEndPoint returns the endpoint of the running postgres container
func (t *TestcontainerApps) GetDBEndPoint() (string, error) {
    if t.postgresContainer == nil || !t.postgresContainer.IsRunning() {
        return "", fmt.Errorf("postgres is not running")
    }
    return t.postgresContainer.ConnectionString(context.Background(), "sslmode=disable")
}

// GetL1GethEndPoint returns the endpoint of the running L1Geth container
func (t *TestcontainerApps) GetL1GethEndPoint() (string, error) {
    if t.l1GethContainer == nil || !t.l1GethContainer.IsRunning() {
        return "", fmt.Errorf("l1 geth is not running")
    }
    endpoint, err := t.l1GethContainer.PortEndpoint(context.Background(), "8546/tcp", "ws")
    if err != nil {
        return "", err
    }
    return endpoint, nil
}

// GetL2GethEndPoint returns the endpoint of the running L2Geth container
func (t *TestcontainerApps) GetL2GethEndPoint() (string, error) {
    if t.l2GethContainer == nil || !t.l2GethContainer.IsRunning() {
        return "", fmt.Errorf("l2 geth is not running")
    }
    endpoint, err := t.l2GethContainer.PortEndpoint(context.Background(), "8546/tcp", "ws")
    if err != nil {
        return "", err
    }
    return endpoint, nil
}

// GetL1GethClient returns a ethclient by dialing running L1Geth
func (t *TestcontainerApps) GetL1GethClient() (*ethclient.Client, error) {
    endpoint, err := t.GetL1GethEndPoint()
    if err != nil {
        return nil, err
    }
    client, err := ethclient.Dial(endpoint)
    if err != nil {
        return nil, err
    }
    return client, nil
}

// GetL2GethClient returns a ethclient by dialing running L2Geth
func (t *TestcontainerApps) GetL2GethClient() (*ethclient.Client, error) {
    endpoint, err := t.GetL2GethEndPoint()
    if err != nil {
        return nil, err
    }
    client, err := ethclient.Dial(endpoint)
    if err != nil {
        return nil, err
    }
    return client, nil
}

// Free stops all running containers
func (t *TestcontainerApps) Free() {
    ctx := context.Background()
    if t.postgresContainer != nil && t.postgresContainer.IsRunning() {
        if err := t.postgresContainer.Terminate(ctx); err != nil {
            log.Printf("failed to stop postgres container: %s", err)
        }
    }
    if t.l1GethContainer != nil && t.l1GethContainer.IsRunning() {
        if err := t.l1GethContainer.Terminate(ctx); err != nil {
            log.Printf("failed to stop scroll_l1geth container: %s", err)
        }
    }
    if t.l2GethContainer != nil && t.l2GethContainer.IsRunning() {
        if err := t.l2GethContainer.Terminate(ctx); err != nil {
            log.Printf("failed to stop scroll_l2geth container: %s", err)
        }
    }
}
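A typical consumer of this new helper starts only the containers it needs and defers Free for cleanup. The sketch below is illustrative rather than taken from the repo; the import path is assumed, and it only uses methods defined in the file above.

package example_test

import (
    "testing"

    "scroll-tech/common/testcontainers" // assumed import path for the package above
)

func TestWithContainers(t *testing.T) {
    testApps := testcontainers.NewTestcontainerApps()
    defer testApps.Free() // terminate whatever was started, even on failure

    if err := testApps.StartPostgresContainer(); err != nil {
        t.Fatalf("start postgres: %v", err)
    }
    dsn, err := testApps.GetDBEndPoint()
    if err != nil {
        t.Fatalf("get db endpoint: %v", err)
    }
    t.Log("postgres DSN:", dsn)
}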
common/testcontainers/testcontainers_test.go (new file, 54 lines)

@@ -0,0 +1,54 @@
package testcontainers

import (
    "testing"

    "github.com/scroll-tech/go-ethereum/ethclient"
    "github.com/stretchr/testify/assert"
)

// TestNewTestcontainerApps tests NewTestcontainerApps
func TestNewTestcontainerApps(t *testing.T) {
    var (
        err      error
        endpoint string
        client   *ethclient.Client
    )

    // test start testcontainers
    testApps := NewTestcontainerApps()
    assert.NoError(t, testApps.StartPostgresContainer())
    endpoint, err = testApps.GetDBEndPoint()
    assert.NoError(t, err)
    assert.NotEmpty(t, endpoint)

    assert.NoError(t, testApps.StartL1GethContainer())
    endpoint, err = testApps.GetL1GethEndPoint()
    assert.NoError(t, err)
    assert.NotEmpty(t, endpoint)
    client, err = testApps.GetL1GethClient()
    assert.NoError(t, err)
    assert.NotNil(t, client)

    assert.NoError(t, testApps.StartL2GethContainer())
    endpoint, err = testApps.GetL2GethEndPoint()
    assert.NoError(t, err)
    assert.NotEmpty(t, endpoint)
    client, err = testApps.GetL2GethClient()
    assert.NoError(t, err)
    assert.NotNil(t, client)

    // test free testcontainers
    testApps.Free()
    endpoint, err = testApps.GetDBEndPoint()
    assert.EqualError(t, err, "postgres is not running")
    assert.Empty(t, endpoint)

    endpoint, err = testApps.GetL1GethEndPoint()
    assert.EqualError(t, err, "l1 geth is not running")
    assert.Empty(t, endpoint)

    endpoint, err = testApps.GetL2GethEndPoint()
    assert.EqualError(t, err, "l2 geth is not running")
    assert.Empty(t, endpoint)
}
@@ -442,9 +442,9 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) {
 }

 // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately.
-func EstimateBatchL1CommitCalldataSize(c *encoding.Batch) (uint64, error) {
+func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) {
     var totalL1CommitCalldataSize uint64
-    for _, chunk := range c.Chunks {
+    for _, chunk := range b.Chunks {
         chunkL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk)
         if err != nil {
             return 0, err
@@ -210,6 +210,10 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
         return nil, fmt.Errorf("too many chunks in batch")
     }

+    if len(batch.Chunks) == 0 {
+        return nil, fmt.Errorf("too few chunks in batch")
+    }
+
     // batch data hash
     dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
     if err != nil {
@@ -284,52 +288,51 @@ func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, *kzg4844.Poi
     // the raw (un-padded) blob payload
     blobBytes := make([]byte, metadataLength)

-    // the number of chunks that contain at least one L2 transaction
-    numNonEmptyChunks := 0
-
     // challenge digest preimage
     // 1 hash for metadata and 1 for each chunk
     challengePreimage := make([]byte, (1+MaxNumChunks)*32)

-    // the challenge point z
-    var z kzg4844.Point
+    // the chunk data hash used for calculating the challenge preimage
+    var chunkDataHash common.Hash
+
+    // blob metadata: num_chunks
+    binary.BigEndian.PutUint16(blobBytes[0:], uint16(len(chunks)))

     // encode blob metadata and L2 transactions,
     // and simultaneously also build challenge preimage
     for chunkID, chunk := range chunks {
         currentChunkStartIndex := len(blobBytes)
-        hasL2Tx := false

         for _, block := range chunk.Blocks {
             for _, tx := range block.Transactions {
                 if tx.Type != types.L1MessageTxType {
-                    hasL2Tx = true
                     // encode L2 txs into blob payload
                     rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx)
                     if err != nil {
                         return nil, nil, err
                     }
                     blobBytes = append(blobBytes, rlpTxData...)
                     continue
                 }
             }
         }

         // blob metadata: chunki_size
-        chunkSize := len(blobBytes) - currentChunkStartIndex
-        binary.BigEndian.PutUint32(blobBytes[2+4*chunkID:], uint32(chunkSize))
-
-        if hasL2Tx {
-            numNonEmptyChunks++
+        if chunkSize := len(blobBytes) - currentChunkStartIndex; chunkSize != 0 {
+            binary.BigEndian.PutUint32(blobBytes[2+4*chunkID:], uint32(chunkSize))
         }

         // challenge: compute chunk data hash
-        hash := crypto.Keccak256Hash(blobBytes[currentChunkStartIndex:])
-        copy(challengePreimage[32+chunkID*32:], hash[:])
+        chunkDataHash = crypto.Keccak256Hash(blobBytes[currentChunkStartIndex:])
+        copy(challengePreimage[32+chunkID*32:], chunkDataHash[:])
     }

-    // blob metadata: num_chunks
-    binary.BigEndian.PutUint16(blobBytes[0:], uint16(numNonEmptyChunks))
     // if we have fewer than MaxNumChunks chunks, the rest
     // of the blob metadata is correctly initialized to 0,
     // but we need to add padding to the challenge preimage
     for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ {
         // use the last chunk's data hash as padding
         copy(challengePreimage[32+chunkID*32:], chunkDataHash[:])
     }

     // challenge: compute metadata hash
     hash := crypto.Keccak256Hash(blobBytes[0:metadataLength])
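For orientation, the blob metadata built above is a big-endian uint16 chunk count at offset 0 followed by one big-endian uint32 size per chunk slot at offset 2 + 4*i. The standalone sketch below reproduces just that layout; the value 15 for MaxNumChunks is an assumption made here for illustration, and a zero size models a chunk holding only L1 messages.

package main

import (
    "encoding/binary"
    "fmt"
)

func main() {
    const maxNumChunks = 15 // assumed stand-in for the package's MaxNumChunks
    metadataLength := 2 + 4*maxNumChunks

    blobBytes := make([]byte, metadataLength)

    // example chunk payload sizes; the second chunk carries no L2 transactions
    chunkSizes := []uint32{120, 0, 64}

    for i, size := range chunkSizes {
        if size != 0 {
            // chunk_i size lives at offset 2 + 4*i
            binary.BigEndian.PutUint32(blobBytes[2+4*i:], size)
        }
    }
    // num_chunks lives at offset 0 (the updated code above writes len(chunks) here)
    binary.BigEndian.PutUint16(blobBytes[0:], uint16(len(chunkSizes)))

    fmt.Printf("metadata: % x\n", blobBytes)
}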
@@ -342,9 +345,14 @@ func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, *kzg4844.Poi
     }

     // compute z = challenge_digest % BLS_MODULUS
-    challengeDigest := crypto.Keccak256Hash(challengePreimage[:])
-    point := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
-    copy(z[:], point.Bytes()[0:32])
+    challengeDigest := crypto.Keccak256Hash(challengePreimage)
+    pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
+    pointBytes := pointBigInt.Bytes()
+
+    // the challenge point z
+    var z kzg4844.Point
+    start := 32 - len(pointBytes)
+    copy(z[start:], pointBytes)

     return blob, &z, nil
 }
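The left-padding in the new version matters because big.Int.Bytes() drops leading zero bytes, so the old copy into z[:] could misalign the field element. Below is a standalone sketch of the same computation; it assumes kzg4844.Point is a 32-byte big-endian value as in go-ethereum's crypto/kzg4844 package and that BLSModulus equals the EIP-4844 BLS_MODULUS used here.

package main

import (
    "fmt"
    "math/big"

    "github.com/scroll-tech/go-ethereum/crypto"
    "github.com/scroll-tech/go-ethereum/crypto/kzg4844"
)

// EIP-4844 BLS_MODULUS; assumed to match the BLSModulus constant used above.
var blsModulus, _ = new(big.Int).SetString(
    "52435875175126190479447740508185965837690552500527637822603658699938581184513", 10)

func challengePoint(challengePreimage []byte) kzg4844.Point {
    challengeDigest := crypto.Keccak256Hash(challengePreimage)
    pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus)
    pointBytes := pointBigInt.Bytes() // may be shorter than 32 bytes

    // copy into the tail of z so the value stays big-endian and left-padded with zeros
    var z kzg4844.Point
    copy(z[32-len(pointBytes):], pointBytes)
    return z
}

func main() {
    fmt.Printf("%x\n", challengePoint(make([]byte, 64)))
}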
@@ -443,8 +451,55 @@ func (b *DABatch) BlobDataProof() ([]byte, error) {
     return BlobDataProofArgs.Pack(values...)
 }

 // Blob returns the blob of the batch.
 func (b *DABatch) Blob() *kzg4844.Blob {
     return b.blob
 }
+
+// DecodeFromCalldata attempts to decode a DABatch and an array of DAChunks from the provided calldata byte slice.
+func DecodeFromCalldata(data []byte) (*DABatch, []*DAChunk, error) {
+    // TODO: implement this function.
+    return nil, nil, nil
+}
+
+// EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk.
+func EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) {
+    metadataSize := uint64(2 + 4*MaxNumChunks) // over-estimate: adding metadata length
+    chunkDataSize, err := chunkL1CommitBlobDataSize(c)
+    if err != nil {
+        return 0, err
+    }
+    paddedSize := ((metadataSize + chunkDataSize + 30) / 31) * 32
+    return paddedSize, nil
+}
+
+// EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch.
+func EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) {
+    metadataSize := uint64(2 + 4*MaxNumChunks)
+    var batchDataSize uint64
+    for _, c := range b.Chunks {
+        chunkDataSize, err := chunkL1CommitBlobDataSize(c)
+        if err != nil {
+            return 0, err
+        }
+        batchDataSize += chunkDataSize
+    }
+    paddedSize := ((metadataSize + batchDataSize + 30) / 31) * 32
+    return paddedSize, nil
+}
+
+func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) {
+    var dataSize uint64
+    for _, block := range c.Blocks {
+        for _, tx := range block.Transactions {
+            if tx.Type != types.L1MessageTxType {
+                rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx)
+                if err != nil {
+                    return 0, err
+                }
+                dataSize += uint64(len(rlpTxData))
+            }
+        }
+    }
+    return dataSize, nil
+}
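The ((metadataSize + dataSize + 30) / 31) * 32 rounding reflects that each 32-byte blob field element carries 31 usable payload bytes. As a worked example under the assumption that MaxNumChunks is 15, metadataSize = 2 + 4*15 = 62, so 1000 bytes of chunk data pad to ((62 + 1000 + 30) / 31) * 32 = 35 * 32 = 1120 bytes. The small self-contained sketch below mirrors the same arithmetic with the constant passed in explicitly.

package main

import "fmt"

// estimateBlobSize mirrors the padding arithmetic above; maxNumChunks is a
// parameter here only because the real MaxNumChunks constant lives in the codec package.
func estimateBlobSize(maxNumChunks, dataSize uint64) uint64 {
    metadataSize := 2 + 4*maxNumChunks
    // each 32-byte field element holds 31 payload bytes, hence the round-up by 31
    return ((metadataSize + dataSize + 30) / 31) * 32
}

func main() {
    fmt.Println(estimateBlobSize(15, 1000)) // 1120
}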
File diff suppressed because one or more lines are too long
@@ -8,6 +8,17 @@ import (
     "github.com/scroll-tech/go-ethereum/core/types"
 )

+// CodecVersion defines the version of encoder and decoder.
+type CodecVersion int
+
+const (
+    // CodecV0 represents the version 0 of the encoder and decoder.
+    CodecV0 CodecVersion = iota
+
+    // CodecV1 represents the version 1 of the encoder and decoder.
+    CodecV1
+)
+
 // Block represents an L2 block.
 type Block struct {
     Header *types.Header

@@ -209,8 +220,3 @@ func (b *Batch) WithdrawRoot() common.Hash {
     lastChunkBlockNum := len(b.Chunks[numChunks-1].Blocks)
     return b.Chunks[len(b.Chunks)-1].Blocks[lastChunkBlockNum-1].WithdrawRoot
 }
-
-// NumChunks gets the number of chunks of the batch.
-func (b *Batch) NumChunks() uint64 {
-    return uint64(len(b.Chunks))
-}
@@ -75,7 +75,6 @@ func TestUtilFunctions(t *testing.T) {
     assert.Equal(t, uint64(240000), chunk3.L2GasUsed())

     // Test Batch methods
-    assert.Equal(t, uint64(3), batch.NumChunks())
     assert.Equal(t, block6.Header.Root, batch.StateRoot())
     assert.Equal(t, block6.WithdrawRoot, batch.WithdrawRoot())
 }
@@ -259,6 +259,7 @@ type ChunkInfo struct {
     WithdrawRoot common.Hash `json:"withdraw_root"`
     DataHash     common.Hash `json:"data_hash"`
     IsPadding    bool        `json:"is_padding"`
+    TxBytes      []byte      `json:"tx_bytes"`
 }

 // ChunkProof includes the proof info that are required for chunk verification and rollup.
@@ -3,11 +3,16 @@ package utils
 import (
     "context"
     "crypto/rand"
+    "encoding/json"
+    "errors"
     "fmt"
     "math/big"
+    "os"
+    "path/filepath"
     "time"

     "github.com/modern-go/reflect2"
+    "github.com/scroll-tech/go-ethereum/core"
 )

 // TryTimes try run several times until the function return true.

@@ -59,3 +64,17 @@ func RandomURL() string {
     id, _ := rand.Int(rand.Reader, big.NewInt(5000-1))
     return fmt.Sprintf("localhost:%d", 10000+2000+id.Int64())
 }
+
+// ReadGenesis parses and returns the genesis file at the given path
+func ReadGenesis(genesisPath string) (*core.Genesis, error) {
+    file, err := os.Open(filepath.Clean(genesisPath))
+    if err != nil {
+        return nil, err
+    }
+
+    genesis := new(core.Genesis)
+    if err := json.NewDecoder(file).Decode(genesis); err != nil {
+        return nil, errors.Join(err, file.Close())
+    }
+    return genesis, file.Close()
+}
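A minimal sketch of calling the new ReadGenesis helper follows. The import path and the genesis.json location are assumptions for illustration; the helper itself is the one added above.

package main

import (
    "fmt"
    "log"

    "scroll-tech/common/utils" // assumed import path for the utils package above
)

func main() {
    // path is illustrative; point it at a real genesis spec on disk
    genesis, err := utils.ReadGenesis("./genesis.json")
    if err != nil {
        log.Fatalf("failed to read genesis: %v", err)
    }
    fmt.Println("chain id:", genesis.Config.ChainID)
}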
@@ -5,7 +5,7 @@ import (
     "runtime/debug"
 )

-var tag = "v4.3.74"
+var tag = "v4.3.80"

 var commit = func() string {
     if info, ok := debug.ReadBuildInfo(); ok {
contracts/.nvmrc (new file, 1 line)

@@ -0,0 +1 @@
+v18.15.0
@@ -162,7 +162,7 @@ Initialize the storage of L1ERC1155Gateway.
 | Name | Type | Description |
 |---|---|---|
 | _counterpart | address | The address of L2ERC1155Gateway in L2. |
-| _messenger | address | The address of L1ScrollMessenger. |
+| _messenger | address | The address of L1ScrollMessenger in L1. |

 ### messenger

@@ -389,12 +389,12 @@ Emitted when the ERC1155 NFT is batch deposited to gateway on layer 1.
Parameter descriptions, previously `undefined`, now read:
 | _l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
 | _l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
 | _from `indexed` | address | The address of sender on layer 1. |
 | _to | address | The address of recipient on layer 2. |
 | _tokenIds | uint256[] | The list of token ids of the ERC1155 NFT to deposit on layer 1. |
 | _amounts | uint256[] | The list of corresponding number of token to deposit on layer 1. |

 ### BatchRefundERC1155

@@ -410,10 +410,10 @@ Emitted when some ERC1155 token is refunded.
Parameter descriptions, previously `undefined`, now read:
 | token `indexed` | address | The address of the token in L1. |
 | recipient `indexed` | address | The address of receiver in L1. |
 | tokenIds | uint256[] | The list of ids of token refunded. |
 | amounts | uint256[] | The list of amount of token refunded. |

 ### DepositERC1155

@@ -429,12 +429,12 @@ Emitted when the ERC1155 NFT is deposited to gateway on layer 1.
Parameter descriptions, previously `undefined`, now read:
 | _l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
 | _l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
 | _from `indexed` | address | The address of sender on layer 1. |
 | _to | address | The address of recipient on layer 2. |
 | _tokenId | uint256 | The token id of the ERC1155 NFT to deposit on layer 1. |
 | _amount | uint256 | The number of token to deposit on layer 1. |

 ### FinalizeBatchWithdrawERC1155

@@ -450,12 +450,12 @@ Emitted when the ERC1155 NFT is batch transferred to recipient on layer 1.
Parameter descriptions, previously `undefined`, now read:
 | _l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
 | _l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
 | _from `indexed` | address | The address of sender on layer 2. |
 | _to | address | The address of recipient on layer 1. |
 | _tokenIds | uint256[] | The list of token ids of the ERC1155 NFT to withdraw from layer 2. |
 | _amounts | uint256[] | The list of corresponding number of token to withdraw from layer 2. |

 ### FinalizeWithdrawERC1155

@@ -471,12 +471,12 @@ Emitted when the ERC1155 NFT is transferred to recipient on layer 1.
Parameter descriptions, previously `undefined`, now read:
 | _l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
 | _l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
 | _from `indexed` | address | The address of sender on layer 2. |
 | _to | address | The address of recipient on layer 1. |
 | _tokenId | uint256 | The token id of the ERC1155 NFT to withdraw from layer 2. |
 | _amount | uint256 | The number of token to withdraw from layer 2. |

 ### Initialized

@@ -486,7 +486,7 @@ event Initialized(uint8 version)
 *Triggered when the contract has been initialized or reinitialized.*

 #### Parameters

@@ -525,10 +525,10 @@ Emitted when some ERC1155 token is refunded.
Parameter descriptions, previously `undefined`, now read:
 | token `indexed` | address | The address of the token in L1. |
 | recipient `indexed` | address | The address of receiver in L1. |
 | tokenId | uint256 | The id of token refunded. |
 | amount | uint256 | The amount of token refunded. |

 ### UpdateTokenMapping
@@ -156,7 +156,7 @@ Initialize the storage of L1ERC721Gateway.
 | Name | Type | Description |
 |---|---|---|
 | _counterpart | address | The address of L2ERC721Gateway in L2. |
-| _messenger | address | The address of L1ScrollMessenger. |
+| _messenger | address | The address of L1ScrollMessenger in L1. |

 ### messenger

@@ -334,11 +334,11 @@ Emitted when the ERC721 NFT is batch deposited to gateway on layer 1.
Parameter descriptions, previously `undefined`, now read:
 | _l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
 | _l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
 | _from `indexed` | address | The address of sender on layer 1. |
 | _to | address | The address of recipient on layer 2. |
 | _tokenIds | uint256[] | The list of token ids of the ERC721 NFT to deposit on layer 1. |

 ### BatchRefundERC721

@@ -354,9 +354,9 @@ Emitted when a batch of ERC721 tokens are refunded.
Parameter descriptions, previously `undefined`, now read:
 | token `indexed` | address | The address of the token in L1. |
 | recipient `indexed` | address | The address of receiver in L1. |
 | tokenIds | uint256[] | The list of token ids of the ERC721 NFT refunded. |

 ### DepositERC721

@@ -372,11 +372,11 @@ Emitted when the ERC721 NFT is deposited to gateway on layer 1.
Parameter descriptions, previously `undefined`, now read:
 | _l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
 | _l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
 | _from `indexed` | address | The address of sender on layer 1. |
 | _to | address | The address of recipient on layer 2. |
 | _tokenId | uint256 | The token id of the ERC721 NFT to deposit on layer 1. |

 ### FinalizeBatchWithdrawERC721

@@ -392,11 +392,11 @@ Emitted when the ERC721 NFT is batch transferred to recipient on layer 1.
Parameter descriptions, previously `undefined`, now read:
 | _l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
 | _l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
 | _from `indexed` | address | The address of sender on layer 2. |
 | _to | address | The address of recipient on layer 1. |
 | _tokenIds | uint256[] | The list of token ids of the ERC721 NFT to withdraw from layer 2. |

 ### FinalizeWithdrawERC721

@@ -412,11 +412,11 @@ Emitted when the ERC721 NFT is transferred to recipient on layer 1.
Parameter descriptions, previously `undefined`, now read:
 | _l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
 | _l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
 | _from `indexed` | address | The address of sender on layer 2. |
 | _to | address | The address of recipient on layer 1. |
 | _tokenId | uint256 | The token id of the ERC721 NFT to withdraw from layer 2. |

 ### Initialized

@@ -426,7 +426,7 @@ event Initialized(uint8 version)
 *Triggered when the contract has been initialized or reinitialized.*

 #### Parameters

@@ -465,9 +465,9 @@ Emitted when some ERC721 token is refunded.
Parameter descriptions, previously `undefined`, now read:
 | token `indexed` | address | The address of the token in L1. |
 | recipient `indexed` | address | The address of receiver in L1. |
 | tokenId | uint256 | The id of token refunded. |

 ### UpdateTokenMapping
@@ -168,7 +168,7 @@ function ethGateway() external view returns (address)
 The address of L1ETHGateway.

 *This variable is no longer used.*

 #### Returns

@@ -286,7 +286,7 @@ function initialize(address _ethGateway, address _defaultERC20Gateway) external
 Initialize the storage of L1GatewayRouter.

 *The parameters `_ethGateway` is no longer used.*

 #### Parameters

@@ -295,23 +295,6 @@ Initialize the storage of L1GatewayRouter.
 | _ethGateway | address | The address of L1ETHGateway contract. |
 | _defaultERC20Gateway | address | The address of default ERC20 Gateway contract. |

-### messenger
-
-```solidity
-function messenger() external view returns (address)
-```
-
-The address of `L1ScrollMessenger`.
-
-#### Returns
-
-| Name | Type | Description |
-|---|---|---|
-| _0 | address | undefined |

 ### owner

 ```solidity

@@ -447,12 +430,12 @@ Emitted when someone deposit ERC20 token from L1 to L2.
Parameter descriptions, previously `undefined`, now read:
 | l1Token `indexed` | address | The address of the token in L1. |
 | l2Token `indexed` | address | The address of the token in L2. |
 | from `indexed` | address | The address of sender in L1. |
 | to | address | The address of recipient in L2. |
 | amount | uint256 | The amount of token will be deposited from L1 to L2. |
 | data | bytes | The optional calldata passed to recipient in L2. |

 ### DepositETH

@@ -468,10 +451,10 @@ Emitted when someone deposit ETH from L1 to L2.
Parameter descriptions, previously `undefined`, now read:
 | from `indexed` | address | The address of sender in L1. |
 | to `indexed` | address | The address of recipient in L2. |
 | amount | uint256 | The amount of ETH will be deposited from L1 to L2. |
 | data | bytes | The optional calldata passed to recipient in L2. |

 ### FinalizeWithdrawERC20

@@ -487,12 +470,12 @@ Emitted when ERC20 token is withdrawn from L2 to L1 and transfer to recipient.
Parameter descriptions, previously `undefined`, now read:
 | l1Token `indexed` | address | The address of the token in L1. |
 | l2Token `indexed` | address | The address of the token in L2. |
 | from `indexed` | address | The address of sender in L2. |
 | to | address | The address of recipient in L1. |
 | amount | uint256 | The amount of token withdrawn from L2 to L1. |
 | data | bytes | The optional calldata passed to recipient in L1. |

 ### FinalizeWithdrawETH

@@ -508,10 +491,10 @@ Emitted when ETH is withdrawn from L2 to L1 and transfer to recipient.
Parameter descriptions, previously `undefined`, now read:
 | from `indexed` | address | The address of sender in L2. |
 | to `indexed` | address | The address of recipient in L1. |
 | amount | uint256 | The amount of ETH withdrawn from L2 to L1. |
 | data | bytes | The optional calldata passed to recipient in L1. |

 ### Initialized

@@ -521,7 +504,7 @@ event Initialized(uint8 version)
 *Triggered when the contract has been initialized or reinitialized.*

 #### Parameters

@@ -560,9 +543,9 @@ Emitted when some ERC20 token is refunded.
Parameter descriptions, previously `undefined`, now read:
 | token `indexed` | address | The address of the token in L1. |
 | recipient `indexed` | address | The address of receiver in L1. |
 | amount | uint256 | The amount of token refunded to receiver. |

 ### RefundETH

@@ -578,8 +561,8 @@ Emitted when some ETH is refunded.
Parameter descriptions, previously `undefined`, now read:
 | recipient `indexed` | address | The address of receiver in L1. |
 | amount | uint256 | The amount of ETH refunded to receiver. |

 ### SetDefaultERC20Gateway

@@ -595,8 +578,8 @@ Emitted when the address of default ERC20 Gateway is updated.
Parameter descriptions, previously `undefined`, now read:
 | oldDefaultERC20Gateway `indexed` | address | The address of the old default ERC20 Gateway. |
 | newDefaultERC20Gateway `indexed` | address | The address of the new default ERC20 Gateway. |

 ### SetERC20Gateway

@@ -612,9 +595,9 @@ Emitted when the `gateway` for `token` is updated.
Parameter descriptions, previously `undefined`, now read:
 | token `indexed` | address | The address of token updated. |
 | oldGateway `indexed` | address | The corresponding address of the old gateway. |
 | newGateway `indexed` | address | The corresponding address of the new gateway. |

 ### SetETHGateway

@@ -630,22 +613,8 @@ Emitted when the address of ETH Gateway is updated.
 | Name | Type | Description |
 |---|---|---|
-| oldETHGateway `indexed` | address | undefined |
-| newEthGateway `indexed` | address | undefined |
-
-## Errors
-
-### ErrorZeroAddress
-
-```solidity
-error ErrorZeroAddress()
-```
-
-*Thrown when the given address is `address(0)`.*
+| oldETHGateway `indexed` | address | The address of the old ETH Gateway. |
+| newEthGateway `indexed` | address | The address of the new ETH Gateway. |
@@ -471,7 +471,7 @@ Emitted when a cross domain message is failed to relay.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| messageHash `indexed` | bytes32 | undefined |
|
||||
| messageHash `indexed` | bytes32 | The hash of the message. |
|
||||
|
||||
### Initialized
|
||||
|
||||
@@ -481,7 +481,7 @@ event Initialized(uint8 version)
|
||||
|
||||
|
||||
|
||||
|
||||
*Triggered when the contract has been initialized or reinitialized.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -514,7 +514,7 @@ event Paused(address account)
|
||||
|
||||
|
||||
|
||||
|
||||
*Emitted when the pause is triggered by `account`.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -536,7 +536,7 @@ Emitted when a cross domain message is relayed successfully.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| messageHash `indexed` | bytes32 | undefined |
|
||||
| messageHash `indexed` | bytes32 | The hash of the message. |
|
||||
|
||||
### SentMessage
|
||||
|
||||
@@ -552,12 +552,12 @@ Emitted when a cross domain message is sent.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| sender `indexed` | address | undefined |
|
||||
| target `indexed` | address | undefined |
|
||||
| value | uint256 | undefined |
|
||||
| messageNonce | uint256 | undefined |
|
||||
| gasLimit | uint256 | undefined |
|
||||
| message | bytes | undefined |
|
||||
| sender `indexed` | address | The address of the sender who initiates the message. |
|
||||
| target `indexed` | address | The address of target contract to call. |
|
||||
| value | uint256 | The amount of value passed to the target contract. |
|
||||
| messageNonce | uint256 | The nonce of the message. |
|
||||
| gasLimit | uint256 | The optional gas limit passed to L1 or L2. |
|
||||
| message | bytes | The calldata passed to the target contract. |
|
||||
|
||||
### Unpaused
|
||||
|
||||
@@ -567,7 +567,7 @@ event Unpaused(address account)
|
||||
|
||||
|
||||
|
||||
|
||||
*Emitted when the pause is lifted by `account`.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -589,8 +589,8 @@ Emitted when owner updates fee vault contract.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| _oldFeeVault | address | undefined |
|
||||
| _newFeeVault | address | undefined |
|
||||
| _oldFeeVault | address | The address of old fee vault contract. |
|
||||
| _newFeeVault | address | The address of new fee vault contract. |
|
||||
|
||||
### UpdateMaxReplayTimes
|
||||
|
||||
@@ -606,8 +606,8 @@ Emitted when the maximum number of times each message can be replayed is updated
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| oldMaxReplayTimes | uint256 | undefined |
|
||||
| newMaxReplayTimes | uint256 | undefined |
|
||||
| oldMaxReplayTimes | uint256 | The old maximum number of times each message can be replayed. |
|
||||
| newMaxReplayTimes | uint256 | The new maximum number of times each message can be replayed. |
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -130,7 +130,7 @@ Return the corresponding l2 token address given l1 token address.
|
||||
### initialize
|
||||
|
||||
```solidity
|
||||
function initialize(address _counterpart, address _router, address _messenger, address _l2TokenImplementation, address _l2TokenFactory) external nonpayable
|
||||
function initialize(address _counterpart, address _router, address _messenger, address, address) external nonpayable
|
||||
```
|
||||
|
||||
Initialize the storage of L1StandardERC20Gateway.
|
||||
@@ -142,10 +142,10 @@ Initialize the storage of L1StandardERC20Gateway.
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| _counterpart | address | The address of L2StandardERC20Gateway in L2. |
|
||||
| _router | address | The address of L1GatewayRouter. |
|
||||
| _messenger | address | The address of L1ScrollMessenger. |
|
||||
| _l2TokenImplementation | address | The address of ScrollStandardERC20 implementation in L2. |
|
||||
| _l2TokenFactory | address | The address of ScrollStandardERC20Factory contract in L2. |
|
||||
| _router | address | The address of L1GatewayRouter in L1. |
|
||||
| _messenger | address | The address of L1ScrollMessenger in L1. |
|
||||
| _3 | address | undefined |
|
||||
| _4 | address | undefined |
|
||||
|
||||
### l2TokenFactory
|
||||
|
||||
@@ -293,12 +293,12 @@ Emitted when someone deposit ERC20 token from L1 to L2.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| l1Token `indexed` | address | The address of the token in L1. |
|
||||
| l2Token `indexed` | address | The address of the token in L2. |
|
||||
| from `indexed` | address | The address of sender in L1. |
|
||||
| to | address | The address of recipient in L2. |
|
||||
| amount | uint256 | The amount of token will be deposited from L1 to L2. |
|
||||
| data | bytes | The optional calldata passed to recipient in L2. |
|
||||
|
||||
### FinalizeWithdrawERC20
|
||||
|
||||
@@ -314,12 +314,12 @@ Emitted when ERC20 token is withdrawn from L2 to L1 and transfer to recipient.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| l1Token `indexed` | address | The address of the token in L1. |
|
||||
| l2Token `indexed` | address | The address of the token in L2. |
|
||||
| from `indexed` | address | The address of sender in L2. |
|
||||
| to | address | The address of recipient in L1. |
|
||||
| amount | uint256 | The amount of token withdrawn from L2 to L1. |
|
||||
| data | bytes | The optional calldata passed to recipient in L1. |
|
||||
|
||||
### Initialized
|
||||
|
||||
@@ -329,7 +329,7 @@ event Initialized(uint8 version)
|
||||
|
||||
|
||||
|
||||
|
||||
*Triggered when the contract has been initialized or reinitialized.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -368,9 +368,9 @@ Emitted when some ERC20 token is refunded.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| token `indexed` | address | undefined |
|
||||
| recipient `indexed` | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| token `indexed` | address | The address of the token in L1. |
|
||||
| recipient `indexed` | address | The address of receiver in L1. |
|
||||
| amount | uint256 | The amount of token refunded to receiver. |
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -152,15 +152,15 @@ function initialize(address _counterpart, address _router, address _messenger) e
|
||||
|
||||
Initialize the storage of L1WETHGateway.
|
||||
|
||||
|
||||
*The parameters `_counterpart`, `_router` and `_messenger` are no longer used.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| _counterpart | address | The address of L2ETHGateway in L2. |
|
||||
| _router | address | The address of L1GatewayRouter. |
|
||||
| _messenger | address | The address of L1ScrollMessenger. |
|
||||
| _router | address | The address of L1GatewayRouter in L1. |
|
||||
| _messenger | address | The address of L1ScrollMessenger in L1. |
|
||||
|
||||
### l2WETH
|
||||
|
||||
@@ -291,12 +291,12 @@ Emitted when someone deposit ERC20 token from L1 to L2.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| l1Token `indexed` | address | The address of the token in L1. |
|
||||
| l2Token `indexed` | address | The address of the token in L2. |
|
||||
| from `indexed` | address | The address of sender in L1. |
|
||||
| to | address | The address of recipient in L2. |
|
||||
| amount | uint256 | The amount of token will be deposited from L1 to L2. |
|
||||
| data | bytes | The optional calldata passed to recipient in L2. |
|
||||
|
||||
### FinalizeWithdrawERC20
|
||||
|
||||
@@ -312,12 +312,12 @@ Emitted when ERC20 token is withdrawn from L2 to L1 and transfer to recipient.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| l1Token `indexed` | address | The address of the token in L1. |
|
||||
| l2Token `indexed` | address | The address of the token in L2. |
|
||||
| from `indexed` | address | The address of sender in L2. |
|
||||
| to | address | The address of recipient in L1. |
|
||||
| amount | uint256 | The amount of token withdrawn from L2 to L1. |
|
||||
| data | bytes | The optional calldata passed to recipient in L1. |
|
||||
|
||||
### Initialized
|
||||
|
||||
@@ -327,7 +327,7 @@ event Initialized(uint8 version)
|
||||
|
||||
|
||||
|
||||
|
||||
*Triggered when the contract has been initialized or reinitialized.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -366,9 +366,9 @@ Emitted when some ERC20 token is refunded.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| token `indexed` | address | undefined |
|
||||
| recipient `indexed` | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| token `indexed` | address | The address of the token in L1. |
|
||||
| recipient `indexed` | address | The address of receiver in L1. |
|
||||
| amount | uint256 | The amount of token refunded to receiver. |
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -373,12 +373,12 @@ Emitted when the ERC1155 NFT is batch transferred to gateway on layer 2.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| tokenIds | uint256[] | undefined |
|
||||
| amounts | uint256[] | undefined |
|
||||
| l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
|
||||
| l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
|
||||
| from `indexed` | address | The address of sender on layer 2. |
|
||||
| to | address | The address of recipient on layer 1. |
|
||||
| tokenIds | uint256[] | The list of token ids of the ERC1155 NFT to withdraw on layer 2. |
|
||||
| amounts | uint256[] | The list of corresponding amounts to withdraw. |
|
||||
|
||||
### FinalizeBatchDepositERC1155
|
||||
|
||||
@@ -394,12 +394,12 @@ Emitted when the ERC1155 NFT is batch transferred to recipient on layer 2.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| tokenIds | uint256[] | undefined |
|
||||
| amounts | uint256[] | undefined |
|
||||
| l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
|
||||
| l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
|
||||
| from `indexed` | address | The address of sender on layer 1. |
|
||||
| to | address | The address of recipient on layer 2. |
|
||||
| tokenIds | uint256[] | The list of token ids of the ERC1155 NFT deposited on layer 1. |
|
||||
| amounts | uint256[] | The list of corresponding amounts deposited. |
|
||||
|
||||
### FinalizeDepositERC1155
|
||||
|
||||
@@ -415,12 +415,12 @@ Emitted when the ERC1155 NFT is transferred to recipient on layer 2.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| tokenId | uint256 | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
|
||||
| l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
|
||||
| from `indexed` | address | The address of sender on layer 1. |
|
||||
| to | address | The address of recipient on layer 2. |
|
||||
| tokenId | uint256 | The token id of the ERC1155 NFT deposited on layer 1. |
|
||||
| amount | uint256 | The amount of token deposited. |
|
||||
|
||||
### Initialized
|
||||
|
||||
@@ -430,7 +430,7 @@ event Initialized(uint8 version)
|
||||
|
||||
|
||||
|
||||
|
||||
*Triggered when the contract has been initialized or reinitialized.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -487,12 +487,12 @@ Emitted when the ERC1155 NFT is transferred to gateway on layer 2.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| tokenId | uint256 | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| l1Token `indexed` | address | The address of ERC1155 NFT on layer 1. |
|
||||
| l2Token `indexed` | address | The address of ERC1155 NFT on layer 2. |
|
||||
| from `indexed` | address | The address of sender on layer 2. |
|
||||
| to | address | The address of recipient on layer 1. |
|
||||
| tokenId | uint256 | The token id of the ERC1155 NFT to withdraw on layer 2. |
|
||||
| amount | uint256 | The amount of token to withdraw. |
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -318,11 +318,11 @@ Emitted when the ERC721 NFT is batch transferred to gateway on layer 2.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| tokenIds | uint256[] | undefined |
|
||||
| l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
|
||||
| l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
|
||||
| from `indexed` | address | The address of sender on layer 2. |
|
||||
| to | address | The address of recipient on layer 1. |
|
||||
| tokenIds | uint256[] | The list of token ids of the ERC721 NFT to withdraw on layer 2. |
|
||||
|
||||
### FinalizeBatchDepositERC721
|
||||
|
||||
@@ -338,11 +338,11 @@ Emitted when the ERC721 NFT is batch transferred to recipient on layer 2.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| tokenIds | uint256[] | undefined |
|
||||
| l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
|
||||
| l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
|
||||
| from `indexed` | address | The address of sender on layer 1. |
|
||||
| to | address | The address of recipient on layer 2. |
|
||||
| tokenIds | uint256[] | The list of token ids of the ERC721 NFT deposited on layer 1. |
|
||||
|
||||
### FinalizeDepositERC721
|
||||
|
||||
@@ -358,11 +358,11 @@ Emitted when the ERC721 NFT is transferred to recipient on layer 2.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| tokenId | uint256 | undefined |
|
||||
| l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
|
||||
| l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
|
||||
| from `indexed` | address | The address of sender on layer 1. |
|
||||
| to | address | The address of recipient on layer 2. |
|
||||
| tokenId | uint256 | The token id of the ERC721 NFT deposited on layer 1. |
|
||||
|
||||
### Initialized
|
||||
|
||||
@@ -372,7 +372,7 @@ event Initialized(uint8 version)
|
||||
|
||||
|
||||
|
||||
|
||||
*Triggered when the contract has been initialized or reinitialized.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -429,11 +429,11 @@ Emitted when the ERC721 NFT is transferred to gateway on layer 2.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| tokenId | uint256 | undefined |
|
||||
| l1Token `indexed` | address | The address of ERC721 NFT on layer 1. |
|
||||
| l2Token `indexed` | address | The address of ERC721 NFT on layer 2. |
|
||||
| from `indexed` | address | The address of sender on layer 2. |
|
||||
| to | address | The address of recipient on layer 1. |
|
||||
| tokenId | uint256 | The token id of the ERC721 NFT to withdraw on layer 2. |
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -189,23 +189,6 @@ function initialize(address _ethGateway, address _defaultERC20Gateway) external
|
||||
| _ethGateway | address | undefined |
|
||||
| _defaultERC20Gateway | address | undefined |
|
||||
|
||||
### messenger
|
||||
|
||||
```solidity
|
||||
function messenger() external view returns (address)
|
||||
```
|
||||
|
||||
The address of `L2ScrollMessenger`.
|
||||
|
||||
|
||||
|
||||
|
||||
#### Returns
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| _0 | address | undefined |
|
||||
|
||||
### owner
|
||||
|
||||
```solidity
|
||||
@@ -428,12 +411,12 @@ Emitted when ERC20 token is deposited from L1 to L2 and transfer to recipient.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| l1Token `indexed` | address | The address of the token in L1. |
|
||||
| l2Token `indexed` | address | The address of the token in L2. |
|
||||
| from `indexed` | address | The address of sender in L1. |
|
||||
| to | address | The address of recipient in L2. |
|
||||
| amount | uint256 | The amount of token withdrawn from L1 to L2. |
|
||||
| data | bytes | The optional calldata passed to recipient in L2. |
|
||||
|
||||
### FinalizeDepositETH
|
||||
|
||||
@@ -449,10 +432,10 @@ Emitted when ETH is deposited from L1 to L2 and transfer to recipient.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| from `indexed` | address | undefined |
|
||||
| to `indexed` | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| from `indexed` | address | The address of sender in L1. |
|
||||
| to `indexed` | address | The address of recipient in L2. |
|
||||
| amount | uint256 | The amount of ETH deposited from L1 to L2. |
|
||||
| data | bytes | The optional calldata passed to recipient in L2. |
|
||||
|
||||
### Initialized
|
||||
|
||||
@@ -462,7 +445,7 @@ event Initialized(uint8 version)
|
||||
|
||||
|
||||
|
||||
|
||||
*Triggered when the contract has been initialized or reinitialized.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -501,8 +484,8 @@ Emitted when the address of default ERC20 Gateway is updated.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| oldDefaultERC20Gateway `indexed` | address | undefined |
|
||||
| newDefaultERC20Gateway `indexed` | address | undefined |
|
||||
| oldDefaultERC20Gateway `indexed` | address | The address of the old default ERC20 Gateway. |
|
||||
| newDefaultERC20Gateway `indexed` | address | The address of the new default ERC20 Gateway. |
|
||||
|
||||
### SetERC20Gateway
|
||||
|
||||
@@ -518,9 +501,9 @@ Emitted when the `gateway` for `token` is updated.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| token `indexed` | address | undefined |
|
||||
| oldGateway `indexed` | address | undefined |
|
||||
| newGateway `indexed` | address | undefined |
|
||||
| token `indexed` | address | The address of token updated. |
|
||||
| oldGateway `indexed` | address | The corresponding address of the old gateway. |
|
||||
| newGateway `indexed` | address | The corresponding address of the new gateway. |
|
||||
|
||||
### SetETHGateway
|
||||
|
||||
@@ -536,8 +519,8 @@ Emitted when the address of ETH Gateway is updated.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| oldETHGateway `indexed` | address | undefined |
|
||||
| newEthGateway `indexed` | address | undefined |
|
||||
| oldETHGateway `indexed` | address | The address of the old ETH Gateway. |
|
||||
| newEthGateway `indexed` | address | The address of the new ETH Gateway. |
|
||||
|
||||
### WithdrawERC20
|
||||
|
||||
@@ -553,12 +536,12 @@ Emitted when someone withdraw ERC20 token from L2 to L1.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| l1Token `indexed` | address | The address of the token in L1. |
|
||||
| l2Token `indexed` | address | The address of the token in L2. |
|
||||
| from `indexed` | address | The address of sender in L2. |
|
||||
| to | address | The address of recipient in L1. |
|
||||
| amount | uint256 | The amount of token will be deposited from L2 to L1. |
|
||||
| data | bytes | The optional calldata passed to recipient in L1. |
|
||||
|
||||
### WithdrawETH
|
||||
|
||||
@@ -574,24 +557,10 @@ Emitted when someone withdraw ETH from L2 to L1.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| from `indexed` | address | undefined |
|
||||
| to `indexed` | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
|
||||
|
||||
|
||||
## Errors
|
||||
|
||||
### ErrorZeroAddress
|
||||
|
||||
```solidity
|
||||
error ErrorZeroAddress()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the given address is `address(0)`.*
|
||||
| from `indexed` | address | The address of sender in L2. |
|
||||
| to `indexed` | address | The address of recipient in L1. |
|
||||
| amount | uint256 | The amount of ETH will be deposited from L2 to L1. |
|
||||
| data | bytes | The optional calldata passed to recipient in L1. |
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -308,7 +308,7 @@ Emitted when a cross domain message is failed to relay.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| messageHash `indexed` | bytes32 | undefined |
|
||||
| messageHash `indexed` | bytes32 | The hash of the message. |
|
||||
|
||||
### Initialized
|
||||
|
||||
@@ -318,7 +318,7 @@ event Initialized(uint8 version)
|
||||
|
||||
|
||||
|
||||
|
||||
*Triggered when the contract has been initialized or reinitialized.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -351,7 +351,7 @@ event Paused(address account)
|
||||
|
||||
|
||||
|
||||
|
||||
*Emitted when the pause is triggered by `account`.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -373,7 +373,7 @@ Emitted when a cross domain message is relayed successfully.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| messageHash `indexed` | bytes32 | undefined |
|
||||
| messageHash `indexed` | bytes32 | The hash of the message. |
|
||||
|
||||
### SentMessage
|
||||
|
||||
@@ -389,12 +389,12 @@ Emitted when a cross domain message is sent.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| sender `indexed` | address | undefined |
|
||||
| target `indexed` | address | undefined |
|
||||
| value | uint256 | undefined |
|
||||
| messageNonce | uint256 | undefined |
|
||||
| gasLimit | uint256 | undefined |
|
||||
| message | bytes | undefined |
|
||||
| sender `indexed` | address | The address of the sender who initiates the message. |
|
||||
| target `indexed` | address | The address of target contract to call. |
|
||||
| value | uint256 | The amount of value passed to the target contract. |
|
||||
| messageNonce | uint256 | The nonce of the message. |
|
||||
| gasLimit | uint256 | The optional gas limit passed to L1 or L2. |
|
||||
| message | bytes | The calldata passed to the target contract. |
|
||||
|
||||
### Unpaused
|
||||
|
||||
@@ -404,7 +404,7 @@ event Unpaused(address account)
|
||||
|
||||
|
||||
|
||||
|
||||
*Emitted when the pause is lifted by `account`.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -426,8 +426,8 @@ Emitted when owner updates fee vault contract.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| _oldFeeVault | address | undefined |
|
||||
| _newFeeVault | address | undefined |
|
||||
| _oldFeeVault | address | The address of old fee vault contract. |
|
||||
| _newFeeVault | address | The address of new fee vault contract. |
|
||||
|
||||
### UpdateMaxFailedExecutionTimes
|
||||
|
||||
@@ -443,8 +443,8 @@ Emitted when the maximum number of times each message can fail in L2 is updated.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| oldMaxFailedExecutionTimes | uint256 | undefined |
|
||||
| newMaxFailedExecutionTimes | uint256 | undefined |
|
||||
| oldMaxFailedExecutionTimes | uint256 | The old maximum number of times each message can fail in L2. |
|
||||
| newMaxFailedExecutionTimes | uint256 | The new maximum number of times each message can fail in L2. |
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -95,7 +95,7 @@ Return the corresponding l2 token address given l1 token address.
|
||||
### initialize
|
||||
|
||||
```solidity
|
||||
function initialize(address _counterpart, address _router, address _messenger, address _tokenFactory) external nonpayable
|
||||
function initialize(address _counterpart, address _router, address _messenger, address) external nonpayable
|
||||
```
|
||||
|
||||
Initialize the storage of L2StandardERC20Gateway.
|
||||
@@ -106,10 +106,10 @@ Initialize the storage of L2StandardERC20Gateway.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| _counterpart | address | The address of L1ETHGateway in L1. |
|
||||
| _router | address | The address of L2GatewayRouter. |
|
||||
| _messenger | address | The address of L2ScrollMessenger. |
|
||||
| _tokenFactory | address | The address of ScrollStandardERC20Factory. |
|
||||
| _counterpart | address | The address of `L1StandardERC20Gateway` contract in L1. |
|
||||
| _router | address | The address of `L2GatewayRouter` contract in L2. |
|
||||
| _messenger | address | The address of `L2ScrollMessenger` contract in L2. |
|
||||
| _3 | address | undefined |
|
||||
|
||||
### messenger
|
||||
|
||||
@@ -281,12 +281,12 @@ Emitted when ERC20 token is deposited from L1 to L2 and transfer to recipient.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| l1Token `indexed` | address | The address of the token in L1. |
|
||||
| l2Token `indexed` | address | The address of the token in L2. |
|
||||
| from `indexed` | address | The address of sender in L1. |
|
||||
| to | address | The address of recipient in L2. |
|
||||
| amount | uint256 | The amount of token withdrawn from L1 to L2. |
|
||||
| data | bytes | The optional calldata passed to recipient in L2. |
|
||||
|
||||
### Initialized
|
||||
|
||||
@@ -296,7 +296,7 @@ event Initialized(uint8 version)
|
||||
|
||||
|
||||
|
||||
|
||||
*Triggered when the contract has been initialized or reinitialized.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -335,12 +335,12 @@ Emitted when someone withdraw ERC20 token from L2 to L1.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| l1Token `indexed` | address | The address of the token in L1. |
|
||||
| l2Token `indexed` | address | The address of the token in L2. |
|
||||
| from `indexed` | address | The address of sender in L2. |
|
||||
| to | address | The address of recipient in L1. |
|
||||
| amount | uint256 | The amount of token will be deposited from L2 to L1. |
|
||||
| data | bytes | The optional calldata passed to recipient in L1. |
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -297,12 +297,12 @@ Emitted when ERC20 token is deposited from L1 to L2 and transfer to recipient.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| l1Token `indexed` | address | The address of the token in L1. |
|
||||
| l2Token `indexed` | address | The address of the token in L2. |
|
||||
| from `indexed` | address | The address of sender in L1. |
|
||||
| to | address | The address of recipient in L2. |
|
||||
| amount | uint256 | The amount of token withdrawn from L1 to L2. |
|
||||
| data | bytes | The optional calldata passed to recipient in L2. |
|
||||
|
||||
### Initialized
|
||||
|
||||
@@ -312,7 +312,7 @@ event Initialized(uint8 version)
|
||||
|
||||
|
||||
|
||||
|
||||
*Triggered when the contract has been initialized or reinitialized.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -351,12 +351,12 @@ Emitted when someone withdraw ERC20 token from L2 to L1.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| l1Token `indexed` | address | undefined |
|
||||
| l2Token `indexed` | address | undefined |
|
||||
| from `indexed` | address | undefined |
|
||||
| to | address | undefined |
|
||||
| amount | uint256 | undefined |
|
||||
| data | bytes | undefined |
|
||||
| l1Token `indexed` | address | The address of the token in L1. |
|
||||
| l2Token `indexed` | address | The address of the token in L2. |
|
||||
| from `indexed` | address | The address of sender in L2. |
|
||||
| to | address | The address of recipient in L1. |
|
||||
| amount | uint256 | The amount of token will be deposited from L2 to L1. |
|
||||
| data | bytes | The optional calldata passed to recipient in L1. |

@@ -91,7 +91,7 @@ function finalizeBatchWithProof(bytes _batchHeader, bytes32 _prevStateRoot, byte

Finalize a committed batch on layer 1.

*We keep this function to upgrade to 4844 more smoothly.*

#### Parameters

@@ -103,6 +103,27 @@ Finalize a committed batch on layer 1.
| _withdrawRoot | bytes32 | undefined |
| _aggrProof | bytes | undefined |

### finalizeBatchWithProof4844

```solidity
function finalizeBatchWithProof4844(bytes _batchHeader, bytes32 _prevStateRoot, bytes32 _postStateRoot, bytes32 _withdrawRoot, bytes _blobDataProof, bytes _aggrProof) external nonpayable
```

Finalize a committed batch (with blob) on layer 1.

*Memory layout of `_blobDataProof`: ```text | z | y | kzg_commitment | kzg_proof | |---------|---------|----------------|-----------| | bytes32 | bytes32 | bytes48 | bytes48 | ```*

#### Parameters

| Name | Type | Description |
|---|---|---|
| _batchHeader | bytes | undefined |
| _prevStateRoot | bytes32 | undefined |
| _postStateRoot | bytes32 | undefined |
| _withdrawRoot | bytes32 | undefined |
| _blobDataProof | bytes | undefined |
| _aggrProof | bytes | undefined |

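For orientation only (not code from this repo): given the documented memory layout of `_blobDataProof` (z, y, KZG commitment, KZG proof), a caller could pack the argument as below. The field values are placeholders; in practice they come from the prover and the blob sidecar of the commit transaction.

```ts
import { ethers } from "ethers";

// Placeholder field values with the documented widths.
const z = "0x" + "11".repeat(32);             // bytes32 evaluation point
const y = "0x" + "22".repeat(32);             // bytes32 claimed evaluation
const kzgCommitment = "0x" + "33".repeat(48); // bytes48
const kzgProof = "0x" + "44".repeat(48);      // bytes48

// Concatenate in the documented order: z | y | kzg_commitment | kzg_proof.
const blobDataProof = ethers.concat([z, y, kzgCommitment, kzgProof]);
console.log(ethers.dataLength(blobDataProof)); // 160 bytes
```

This packed proof is the blob-related input that the 4844 finalization path checks against the point evaluation precompile; the `ErrorCallPointEvaluationPrecompileFailed` and `ErrorUnexpectedPointEvaluationPrecompileOutput` errors later in this diff cover the failure cases.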
### finalizedStateRoots
|
||||
|
||||
```solidity
|
||||
@@ -493,8 +514,8 @@ Emitted when a new batch is committed.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| batchIndex `indexed` | uint256 | undefined |
|
||||
| batchHash `indexed` | bytes32 | undefined |
|
||||
| batchIndex `indexed` | uint256 | The index of the batch. |
|
||||
| batchHash `indexed` | bytes32 | The hash of the batch. |
|
||||
|
||||
### FinalizeBatch
|
||||
|
||||
@@ -510,10 +531,10 @@ Emitted when a batch is finalized.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| batchIndex `indexed` | uint256 | undefined |
|
||||
| batchHash `indexed` | bytes32 | undefined |
|
||||
| stateRoot | bytes32 | undefined |
|
||||
| withdrawRoot | bytes32 | undefined |
|
||||
| batchIndex `indexed` | uint256 | The index of the batch. |
|
||||
| batchHash `indexed` | bytes32 | The hash of the batch |
|
||||
| stateRoot | bytes32 | The state root on layer 2 after this batch. |
|
||||
| withdrawRoot | bytes32 | The merkle root on layer2 after this batch. |
|
||||
|
||||
### Initialized
|
||||
|
||||
@@ -523,7 +544,7 @@ event Initialized(uint8 version)
|
||||
|
||||
|
||||
|
||||
|
||||
*Triggered when the contract has been initialized or reinitialized.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -556,7 +577,7 @@ event Paused(address account)
|
||||
|
||||
|
||||
|
||||
|
||||
*Emitted when the pause is triggered by `account`.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -578,8 +599,8 @@ revert a pending batch.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| batchIndex `indexed` | uint256 | undefined |
|
||||
| batchHash `indexed` | bytes32 | undefined |
|
||||
| batchIndex `indexed` | uint256 | The index of the batch. |
|
||||
| batchHash `indexed` | bytes32 | The hash of the batch |
|
||||
|
||||
### Unpaused
|
||||
|
||||
@@ -589,7 +610,7 @@ event Unpaused(address account)
|
||||
|
||||
|
||||
|
||||
|
||||
*Emitted when the pause is lifted by `account`.*
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -652,6 +673,347 @@ Emitted when owner updates the status of sequencer.
|
||||
|
||||
## Errors
|
||||
|
||||
### ErrorAccountIsNotEOA
|
||||
|
||||
```solidity
|
||||
error ErrorAccountIsNotEOA()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the given account is not EOA account.*
|
||||
|
||||
|
||||
### ErrorBatchHeaderLengthTooSmall
|
||||
|
||||
```solidity
|
||||
error ErrorBatchHeaderLengthTooSmall()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the length of batch header is smaller than 89*
|
||||
|
||||
|
||||
### ErrorBatchIsAlreadyCommitted
|
||||
|
||||
```solidity
|
||||
error ErrorBatchIsAlreadyCommitted()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when committing a committed batch.*
|
||||
|
||||
|
||||
### ErrorBatchIsAlreadyVerified
|
||||
|
||||
```solidity
|
||||
error ErrorBatchIsAlreadyVerified()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when finalizing a verified batch.*
|
||||
|
||||
|
||||
### ErrorBatchIsEmpty
|
||||
|
||||
```solidity
|
||||
error ErrorBatchIsEmpty()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when committing empty batch (batch without chunks)*
|
||||
|
||||
|
||||
### ErrorCallPointEvaluationPrecompileFailed
|
||||
|
||||
```solidity
|
||||
error ErrorCallPointEvaluationPrecompileFailed()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when call precompile failed.*
|
||||
|
||||
|
||||
### ErrorCallerIsNotProver
|
||||
|
||||
```solidity
|
||||
error ErrorCallerIsNotProver()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the caller is not prover.*
|
||||
|
||||
|
||||
### ErrorCallerIsNotSequencer
|
||||
|
||||
```solidity
|
||||
error ErrorCallerIsNotSequencer()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the caller is not sequencer.*
|
||||
|
||||
|
||||
### ErrorFoundMultipleBlob
|
||||
|
||||
```solidity
|
||||
error ErrorFoundMultipleBlob()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the transaction has multiple blobs.*
|
||||
|
||||
|
||||
### ErrorGenesisBatchHasNonZeroField
|
||||
|
||||
```solidity
|
||||
error ErrorGenesisBatchHasNonZeroField()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when some fields are not zero in genesis batch.*
|
||||
|
||||
|
||||
### ErrorGenesisBatchImported
|
||||
|
||||
```solidity
|
||||
error ErrorGenesisBatchImported()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when importing genesis batch twice.*
|
||||
|
||||
|
||||
### ErrorGenesisDataHashIsZero
|
||||
|
||||
```solidity
|
||||
error ErrorGenesisDataHashIsZero()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when data hash in genesis batch is zero.*
|
||||
|
||||
|
||||
### ErrorGenesisParentBatchHashIsNonZero
|
||||
|
||||
```solidity
|
||||
error ErrorGenesisParentBatchHashIsNonZero()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the parent batch hash in the genesis batch is nonzero.*
|
||||
|
||||
|
||||
### ErrorIncompleteL2TransactionData
|
||||
|
||||
```solidity
|
||||
error ErrorIncompleteL2TransactionData()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the l2 transaction is incomplete.*
|
||||
|
||||
|
||||
### ErrorIncorrectBatchHash
|
||||
|
||||
```solidity
|
||||
error ErrorIncorrectBatchHash()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the batch hash is incorrect.*
|
||||
|
||||
|
||||
### ErrorIncorrectBatchIndex
|
||||
|
||||
```solidity
|
||||
error ErrorIncorrectBatchIndex()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the batch index is incorrect.*
|
||||
|
||||
|
||||
### ErrorIncorrectBitmapLength
|
||||
|
||||
```solidity
|
||||
error ErrorIncorrectBitmapLength()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the bitmap length is incorrect.*
|
||||
|
||||
|
||||
### ErrorIncorrectChunkLength
|
||||
|
||||
```solidity
|
||||
error ErrorIncorrectChunkLength()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the length of chunk is incorrect.*
|
||||
|
||||
|
||||
### ErrorIncorrectPreviousStateRoot
|
||||
|
||||
```solidity
|
||||
error ErrorIncorrectPreviousStateRoot()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the previous state root doesn't match stored one.*
|
||||
|
||||
|
||||
### ErrorInvalidBatchHeaderVersion
|
||||
|
||||
```solidity
|
||||
error ErrorInvalidBatchHeaderVersion()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the batch header version is invalid.*
|
||||
|
||||
|
||||
### ErrorLastL1MessageSkipped
|
||||
|
||||
```solidity
|
||||
error ErrorLastL1MessageSkipped()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the last message is skipped.*
|
||||
|
||||
|
||||
### ErrorNoBlobFound
|
||||
|
||||
```solidity
|
||||
error ErrorNoBlobFound()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when no blob found in the transaction.*
|
||||
|
||||
|
||||
### ErrorNoBlockInChunk
|
||||
|
||||
```solidity
|
||||
error ErrorNoBlockInChunk()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when no blocks in chunk.*
|
||||
|
||||
|
||||
### ErrorNumTxsLessThanNumL1Msgs
|
||||
|
||||
```solidity
|
||||
error ErrorNumTxsLessThanNumL1Msgs()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the number of transactions is less than number of L1 message in one block.*
|
||||
|
||||
|
||||
### ErrorPreviousStateRootIsZero
|
||||
|
||||
```solidity
|
||||
error ErrorPreviousStateRootIsZero()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the given previous state is zero.*
|
||||
|
||||
|
||||
### ErrorRevertFinalizedBatch
|
||||
|
||||
```solidity
|
||||
error ErrorRevertFinalizedBatch()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when reverting a finalized batch.*
|
||||
|
||||
|
||||
### ErrorRevertNotStartFromEnd
|
||||
|
||||
```solidity
|
||||
error ErrorRevertNotStartFromEnd()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the reverted batches are not at the end of the committed batch chain.*
|
||||
|
||||
|
||||
### ErrorRevertZeroBatches
|
||||
|
||||
```solidity
|
||||
error ErrorRevertZeroBatches()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the number of batches to revert is zero.*
|
||||
|
||||
|
||||
### ErrorStateRootIsZero
|
||||
|
||||
```solidity
|
||||
error ErrorStateRootIsZero()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the given state root is zero.*
|
||||
|
||||
|
||||
### ErrorTooManyTxsInOneChunk
|
||||
|
||||
```solidity
|
||||
error ErrorTooManyTxsInOneChunk()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when a chunk contains too many transactions.*
|
||||
|
||||
|
||||
### ErrorUnexpectedPointEvaluationPrecompileOutput
|
||||
|
||||
```solidity
|
||||
error ErrorUnexpectedPointEvaluationPrecompileOutput()
|
||||
```
|
||||
|
||||
|
||||
|
||||
*Thrown when the precompile output is incorrect.*
|
||||
|
||||
|
||||
### ErrorZeroAddress
|
||||
|
||||
```solidity
|
||||
|
||||
@@ -135,8 +135,8 @@ Emitted when a l2 token is deployed.
|
||||
|
||||
| Name | Type | Description |
|
||||
|---|---|---|
|
||||
| _l1Token `indexed` | address | undefined |
|
||||
| _l2Token `indexed` | address | undefined |
|
||||
| _l1Token `indexed` | address | The address of the l1 token. |
|
||||
| _l2Token `indexed` | address | The address of the l2 token. |
|
||||
|
||||
### OwnershipTransferred
|
||||
|
||||
|
||||
@@ -8,8 +8,8 @@ remappings = [] # a list of remapp
libraries = [] # a list of deployed libraries to link against
cache = true # whether to cache builds or not
force = true # whether to ignore the cache (clean build)
evm_version = 'london' # the evm version (by hardfork name)
solc_version = '0.8.16' # override for the solc version (setting this ignores `auto_detect_solc`)
# evm_version = 'london' # the evm version (by hardfork name)
solc_version = '0.8.24' # override for the solc version (setting this ignores `auto_detect_solc`)
optimizer = true # enable or disable the solc optimizer
optimizer_runs = 200 # the number of optimizer runs
verbosity = 2 # the verbosity of tests

@@ -2,8 +2,9 @@ import * as dotenv from "dotenv";

import { HardhatUserConfig, subtask } from "hardhat/config";
import * as toml from "toml";
import "@nomiclabs/hardhat-etherscan";
import "@nomiclabs/hardhat-waffle";
import "@nomicfoundation/hardhat-verify";
import "@nomicfoundation/hardhat-ethers";
import "@nomicfoundation/hardhat-chai-matchers";
import "@typechain/hardhat";
import "@primitivefi/hardhat-dodoc";
import "hardhat-gas-reporter";
@@ -13,16 +14,10 @@ import { TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS } from "hardhat/builtin-tasks/ta

dotenv.config();

// default values here to avoid failures when running hardhat
const RINKEBY_RPC = process.env.RINKEBY_RPC || "1".repeat(32);
const SCROLL_L1_RPC = process.env.SCROLL_L1_RPC || "1".repeat(32);
const SCROLL_L2_RPC = process.env.SCROLL_L2_RPC || "1".repeat(32);

const RINKEBY_PRIVATE_KEY = process.env.RINKEBY_PRIVATE_KEY || "1".repeat(64);
const L1_DEPLOYER_PRIVATE_KEY = process.env.L1_DEPLOYER_PRIVATE_KEY || "1".repeat(64);
const L2_DEPLOYER_PRIVATE_KEY = process.env.L2_DEPLOYER_PRIVATE_KEY || "1".repeat(64);

const SOLC_DEFAULT = "0.8.16";
const SOLC_DEFAULT = "0.8.24";

// try use forge config
let foundry: any;
@@ -45,29 +40,30 @@ subtask(TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS).setAction(async (_, __, runSuper

const config: HardhatUserConfig = {
  solidity: {
    version: foundry.default?.solc || SOLC_DEFAULT,
    version: foundry.default?.solc_version || SOLC_DEFAULT,
    settings: {
      optimizer: {
        enabled: foundry.default?.optimizer || true,
        runs: foundry.default?.optimizer_runs || 200,
      },
      evmVersion: "cancun",
    },
  },
  networks: {
    rinkeby: {
      url: RINKEBY_RPC,
      accounts: [RINKEBY_PRIVATE_KEY],
    },
    l1geth: {
      url: SCROLL_L1_RPC,
      gasPrice: 20000000000,
      gasMultiplier: 1.1,
    ethereum: {
      url: "https://1rpc.io/eth",
      accounts: [L1_DEPLOYER_PRIVATE_KEY],
    },
    l2geth: {
      url: SCROLL_L2_RPC,
      gasPrice: 20000000000,
      gasMultiplier: 1.1,
    sepolia: {
      url: "https://1rpc.io/sepolia",
      accounts: [L1_DEPLOYER_PRIVATE_KEY],
    },
    scroll: {
      url: "https://rpc.scroll.io",
      accounts: [L2_DEPLOYER_PRIVATE_KEY],
    },
    scroll_sepolia: {
      url: "https://sepolia-rpc.scroll.io",
      accounts: [L2_DEPLOYER_PRIVATE_KEY],
    },
  },
@@ -76,13 +72,40 @@ const config: HardhatUserConfig = {
    sources: "./src",
    tests: "./integration-test",
  },
  typechain: {
    outDir: "./typechain",
    target: "ethers-v6",
  },
  gasReporter: {
    enabled: process.env.REPORT_GAS !== undefined,
    excludeContracts: ["src/test"],
    currency: "USD",
  },
  etherscan: {
    apiKey: process.env.ETHERSCAN_API_KEY,
    apiKey: {
      ethereum: process.env.ETHERSCAN_API_KEY || "",
      sepolia: process.env.ETHERSCAN_API_KEY || "",
      scroll: process.env.SCROLLSCAN_API_KEY || "",
      scroll_sepolia: process.env.SCROLLSCAN_API_KEY || "",
    },
    customChains: [
      {
        network: "scroll",
        chainId: 534352,
        urls: {
          apiURL: "https://api.scrollscan.com/api",
          browserURL: "https://www.scrollscan.com/",
        },
      },
      {
        network: "scroll_sepolia",
        chainId: 534351,
        urls: {
          apiURL: "https://api-sepolia.scrollscan.com/api",
          browserURL: "https://sepolia.scrollscan.com/",
        },
      },
    ],
  },
  mocha: {
    timeout: 10000000,

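A usage note on the `customChains` entries above: once `SCROLLSCAN_API_KEY` is set, verification on Scroll or Scroll Sepolia can be driven through the standard hardhat-verify task, either via the `hardhat verify` CLI or programmatically. A minimal sketch follows; the script name, address, and constructor arguments are placeholders and not files from this repo.

```ts
// scripts/verify-example.ts (hypothetical helper script)
import hre from "hardhat";

async function main(): Promise<void> {
  // Placeholder address and constructor arguments for the contract to verify.
  await hre.run("verify:verify", {
    address: "0x" + "00".repeat(20),
    constructorArguments: [],
  });
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});
```

Running it with something like `npx hardhat run scripts/verify-example.ts --network scroll_sepolia` picks up the Scrollscan API and browser URLs configured in the `customChains` block above.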
@@ -1,16 +1,16 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { HardhatEthersSigner, SignerWithAddress } from "@nomicfoundation/hardhat-ethers/signers";
import { expect } from "chai";
import { BigNumberish, BytesLike, constants } from "ethers";
import { BigNumberish, BytesLike, MaxUint256, ZeroAddress, getBytes } from "ethers";
import { ethers } from "hardhat";

import { EnforcedTxGateway, L1MessageQueue, L2GasPriceOracle, MockCaller } from "../typechain";
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
import { arrayify } from "ethers/lib/utils";

describe("EnforcedTxGateway.spec", async () => {
  let deployer: SignerWithAddress;
  let feeVault: SignerWithAddress;
  let signer: SignerWithAddress;
  let deployer: HardhatEthersSigner;
  let feeVault: HardhatEthersSigner;
  let signer: HardhatEthersSigner;

  let caller: MockCaller;
  let gateway: EnforcedTxGateway;
@@ -21,10 +21,8 @@ describe("EnforcedTxGateway.spec", async () => {
    const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
    const Factory = await ethers.getContractFactory(name, deployer);
    const impl = args.length > 0 ? await Factory.deploy(...args) : await Factory.deploy();
    await impl.deployed();
    const proxy = await TransparentUpgradeableProxy.deploy(impl.address, admin, "0x");
    await proxy.deployed();
    return proxy.address;
    const proxy = await TransparentUpgradeableProxy.deploy(impl.getAddress(), admin, "0x");
    return proxy.getAddress();
  };

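The rewritten imports and proxy helper above (and the `fee - 1n` / `fee + 100n` arithmetic later in this diff) follow the usual ethers v5 to v6 migration. A standalone sketch of the main substitutions, for reference; it is illustrative only and not code from this test suite.

```ts
import { MaxUint256, ZeroAddress, getBytes, parseEther } from "ethers";

// v5: BigNumber helpers            v6: native bigint
const fee = parseEther("0.01");     // bigint in v6
const underpaid = fee - 1n;         // was fee.sub(1)
const overpaid = fee + 100n;        // was fee.add(100)

// v5: ethers.constants.*           v6: top-level constants
const deadline = MaxUint256;
const refundAddress = ZeroAddress;

// v5: utils.arrayify(data)         v6: getBytes(data)
const payload = getBytes("0x1234"); // Uint8Array

console.log(underpaid < overpaid, deadline > 0n, refundAddress, payload.length);
```

Contract addresses are likewise obtained asynchronously in v6 (`await contract.getAddress()` instead of `contract.address`), which is why the proxy helper above now returns `proxy.getAddress()`.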
beforeEach(async () => {
|
||||
@@ -32,66 +30,61 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
|
||||
const ProxyAdmin = await ethers.getContractFactory("ProxyAdmin", deployer);
|
||||
const admin = await ProxyAdmin.deploy();
|
||||
await admin.deployed();
|
||||
|
||||
gateway = await ethers.getContractAt(
|
||||
"EnforcedTxGateway",
|
||||
await deployProxy("EnforcedTxGateway", admin.address, []),
|
||||
await deployProxy("EnforcedTxGateway", await admin.getAddress(), []),
|
||||
deployer
|
||||
);
|
||||
|
||||
queue = await ethers.getContractAt(
|
||||
"L1MessageQueue",
|
||||
await deployProxy("L1MessageQueue", admin.address, [deployer.address, deployer.address, gateway.address]),
|
||||
await deployProxy("L1MessageQueue", await admin.getAddress(), [
|
||||
deployer.address,
|
||||
deployer.address,
|
||||
await gateway.getAddress(),
|
||||
]),
|
||||
deployer
|
||||
);
|
||||
|
||||
oracle = await ethers.getContractAt(
|
||||
"L2GasPriceOracle",
|
||||
await deployProxy("L2GasPriceOracle", admin.address, []),
|
||||
await deployProxy("L2GasPriceOracle", await admin.getAddress(), []),
|
||||
deployer
|
||||
);
|
||||
|
||||
const MockCaller = await ethers.getContractFactory("MockCaller", deployer);
|
||||
caller = await MockCaller.deploy();
|
||||
await caller.deployed();
|
||||
|
||||
await queue.initialize(
|
||||
constants.AddressZero,
|
||||
constants.AddressZero,
|
||||
constants.AddressZero,
|
||||
oracle.address,
|
||||
10000000
|
||||
);
|
||||
await gateway.initialize(queue.address, feeVault.address);
|
||||
await queue.initialize(ZeroAddress, ZeroAddress, ZeroAddress, oracle.getAddress(), 10000000);
|
||||
await gateway.initialize(queue.getAddress(), feeVault.address);
|
||||
await oracle.initialize(21000, 51000, 8, 16);
|
||||
|
||||
const Whitelist = await ethers.getContractFactory("Whitelist", deployer);
|
||||
const whitelist = await Whitelist.deploy(deployer.address);
|
||||
await whitelist.deployed();
|
||||
|
||||
await whitelist.updateWhitelistStatus([deployer.address], true);
|
||||
await oracle.updateWhitelist(whitelist.address);
|
||||
await oracle.updateWhitelist(whitelist.getAddress());
|
||||
await oracle.setL2BaseFee(1);
|
||||
});
|
||||
|
||||
context("auth", async () => {
|
||||
it("should initialize correctly", async () => {
|
||||
expect(await gateway.owner()).to.eq(deployer.address);
|
||||
expect(await gateway.messageQueue()).to.eq(queue.address);
|
||||
expect(await gateway.messageQueue()).to.eq(await queue.getAddress());
|
||||
expect(await gateway.feeVault()).to.eq(feeVault.address);
|
||||
expect(await gateway.paused()).to.eq(false);
|
||||
});
|
||||
|
||||
it("should revert, when initialize again", async () => {
|
||||
await expect(gateway.initialize(constants.AddressZero, constants.AddressZero)).to.revertedWith(
|
||||
await expect(gateway.initialize(ZeroAddress, ZeroAddress)).to.revertedWith(
|
||||
"Initializable: contract is already initialized"
|
||||
);
|
||||
});
|
||||
|
||||
context("#updateFeeVault", async () => {
|
||||
it("should revert, when non-owner call", async () => {
|
||||
await expect(gateway.connect(signer).updateFeeVault(constants.AddressZero)).to.revertedWith(
|
||||
await expect(gateway.connect(signer).updateFeeVault(ZeroAddress)).to.revertedWith(
|
||||
"Ownable: caller is not the owner"
|
||||
);
|
||||
});
|
||||
@@ -129,13 +122,13 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
});
|
||||
|
||||
it("should revert, when call is not EOA", async () => {
|
||||
const tx = await gateway.populateTransaction["sendTransaction(address,uint256,uint256,bytes)"](
|
||||
const calldata = gateway.interface.encodeFunctionData("sendTransaction(address,uint256,uint256,bytes)", [
|
||||
signer.address,
|
||||
0,
|
||||
0,
|
||||
"0x"
|
||||
);
|
||||
await expect(caller.callTarget(gateway.address, tx.data!)).to.revertedWith(
|
||||
"0x",
|
||||
]);
|
||||
await expect(caller.callTarget(gateway.getAddress(), calldata)).to.revertedWith(
|
||||
"Only EOA senders are allowed to send enforced transaction"
|
||||
);
|
||||
});
|
||||
@@ -145,12 +138,12 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
await expect(
|
||||
gateway
|
||||
.connect(signer)
|
||||
["sendTransaction(address,uint256,uint256,bytes)"](signer.address, 0, 1000000, "0x", { value: fee.sub(1) })
|
||||
["sendTransaction(address,uint256,uint256,bytes)"](signer.address, 0, 1000000, "0x", { value: fee - 1n })
|
||||
).to.revertedWith("Insufficient value for fee");
|
||||
});
|
||||
|
||||
it("should revert, when failed to deduct the fee", async () => {
|
||||
await gateway.updateFeeVault(gateway.address);
|
||||
await gateway.updateFeeVault(gateway.getAddress());
|
||||
const fee = await queue.estimateCrossDomainMessageFee(1000000);
|
||||
await expect(
|
||||
gateway
|
||||
@@ -170,7 +163,7 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
.to.emit(queue, "QueueTransaction")
|
||||
.withArgs(signer.address, deployer.address, 0, 0, 1000000, "0x");
|
||||
const feeVaultBalanceAfter = await ethers.provider.getBalance(feeVault.address);
|
||||
expect(feeVaultBalanceAfter.sub(feeVaultBalanceBefore)).to.eq(fee);
|
||||
expect(feeVaultBalanceAfter - feeVaultBalanceBefore).to.eq(fee);
|
||||
});
|
||||
|
||||
it("should succeed, with refund", async () => {
|
||||
@@ -179,17 +172,15 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
const signerBalanceBefore = await ethers.provider.getBalance(signer.address);
|
||||
const tx = await gateway
|
||||
.connect(signer)
|
||||
["sendTransaction(address,uint256,uint256,bytes)"](deployer.address, 0, 1000000, "0x", { value: fee.add(100) });
|
||||
["sendTransaction(address,uint256,uint256,bytes)"](deployer.address, 0, 1000000, "0x", { value: fee + 100n });
|
||||
await expect(tx)
|
||||
.to.emit(queue, "QueueTransaction")
|
||||
.withArgs(signer.address, deployer.address, 0, 0, 1000000, "0x");
|
||||
const receipt = await tx.wait();
|
||||
const feeVaultBalanceAfter = await ethers.provider.getBalance(feeVault.address);
|
||||
const signerBalanceAfter = await ethers.provider.getBalance(signer.address);
|
||||
expect(feeVaultBalanceAfter.sub(feeVaultBalanceBefore)).to.eq(fee);
|
||||
expect(signerBalanceBefore.sub(signerBalanceAfter)).to.eq(
|
||||
receipt.gasUsed.mul(receipt.effectiveGasPrice).add(fee)
|
||||
);
|
||||
expect(feeVaultBalanceAfter - feeVaultBalanceBefore).to.eq(fee);
|
||||
expect(signerBalanceBefore - signerBalanceAfter).to.eq(receipt!.gasUsed * receipt!.gasPrice + fee);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -203,19 +194,19 @@ describe("EnforcedTxGateway.spec", async () => {
) => {
const enforcedTx = {
sender: signer.address,
target: target,
value: value,
gasLimit: gasLimit,
data: arrayify(data),
target,
value,
gasLimit,
data: getBytes(data),
nonce: await gateway.nonces(signer.address),
deadline: constants.MaxUint256,
deadline: MaxUint256,
};

const domain = {
name: "EnforcedTxGateway",
version: "1",
chainId: (await ethers.provider.getNetwork()).chainId,
verifyingContract: gateway.address,
verifyingContract: await gateway.getAddress(),
};

const types = {
@@ -251,7 +242,7 @@ describe("EnforcedTxGateway.spec", async () => {
],
};

const signature = await signer._signTypedData(domain, types, enforcedTx);
const signature = await signer.signTypedData(domain, types, enforcedTx);
return signature;
};

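// --- Editor's sketch (not part of the diff): the v6 form of the signing helper above, consolidated.
// The EIP-712 `types` block is elided by the hunk; the struct layout below is inferred from the
// `enforcedTx` fields and is an assumption, not the repository's definition.
import { MaxUint256, getBytes } from "ethers";
import { ethers } from "hardhat";

async function signEnforcedTx(gateway: any, signer: any, target: string, value: bigint, gasLimit: bigint, data: string) {
  const message = {
    sender: signer.address,
    target,
    value,
    gasLimit,
    data: getBytes(data),
    nonce: await gateway.nonces(signer.address),
    deadline: MaxUint256,
  };
  const domain = {
    name: "EnforcedTxGateway",
    version: "1",
    chainId: (await ethers.provider.getNetwork()).chainId,
    verifyingContract: await gateway.getAddress(),
  };
  // Assumed field list; the real definition lives in the lines this hunk omits.
  const types = {
    EnforcedTransaction: [
      { name: "sender", type: "address" },
      { name: "target", type: "address" },
      { name: "value", type: "uint256" },
      { name: "gasLimit", type: "uint256" },
      { name: "data", type: "bytes" },
      { name: "nonce", type: "uint256" },
      { name: "deadline", type: "uint256" },
    ],
  };
  // ethers v6: signTypedData replaces the v5 _signTypedData.
  return signer.signTypedData(domain, types, message);
}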
@@ -266,15 +257,15 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
0,
|
||||
0,
|
||||
"0x",
|
||||
constants.MaxUint256,
|
||||
MaxUint256,
|
||||
"0x",
|
||||
constants.AddressZero
|
||||
ZeroAddress
|
||||
)
|
||||
).to.revertedWith("Pausable: paused");
|
||||
});
|
||||
|
||||
it("should revert, when signature expired", async () => {
|
||||
const timestamp = (await ethers.provider.getBlock("latest")).timestamp;
|
||||
const timestamp = (await ethers.provider.getBlock("latest"))!.timestamp;
|
||||
await expect(
|
||||
gateway
|
||||
.connect(deployer)
|
||||
@@ -286,7 +277,7 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
"0x",
|
||||
timestamp - 1,
|
||||
"0x",
|
||||
constants.AddressZero
|
||||
ZeroAddress
|
||||
)
|
||||
).to.revertedWith("signature expired");
|
||||
});
|
||||
@@ -302,9 +293,9 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
0,
|
||||
0,
|
||||
"0x",
|
||||
constants.MaxUint256,
|
||||
MaxUint256,
|
||||
signature,
|
||||
constants.AddressZero
|
||||
ZeroAddress
|
||||
)
|
||||
).to.revertedWith("Incorrect signature");
|
||||
});
|
||||
@@ -321,16 +312,16 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
0,
|
||||
1000000,
|
||||
"0x",
|
||||
constants.MaxUint256,
|
||||
MaxUint256,
|
||||
signature,
|
||||
signer.address,
|
||||
{ value: fee.sub(1) }
|
||||
{ value: fee - 1n }
|
||||
)
|
||||
).to.revertedWith("Insufficient value for fee");
|
||||
});
|
||||
|
||||
it("should revert, when failed to deduct the fee", async () => {
|
||||
await gateway.updateFeeVault(gateway.address);
|
||||
await gateway.updateFeeVault(gateway.getAddress());
|
||||
const signature = await getSignature(signer, signer.address, 0, 1000000, "0x");
|
||||
const fee = await queue.estimateCrossDomainMessageFee(1000000);
|
||||
await expect(
|
||||
@@ -342,7 +333,7 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
0,
|
||||
1000000,
|
||||
"0x",
|
||||
constants.MaxUint256,
|
||||
MaxUint256,
|
||||
signature,
|
||||
signer.address,
|
||||
{ value: fee }
|
||||
@@ -364,7 +355,7 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
0,
|
||||
1000000,
|
||||
"0x",
|
||||
constants.MaxUint256,
|
||||
MaxUint256,
|
||||
signature,
|
||||
signer.address,
|
||||
{ value: fee }
|
||||
@@ -374,7 +365,7 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
.withArgs(signer.address, deployer.address, 0, 0, 1000000, "0x");
|
||||
expect(await gateway.nonces(signer.address)).to.eq(1);
|
||||
const feeVaultBalanceAfter = await ethers.provider.getBalance(feeVault.address);
|
||||
expect(feeVaultBalanceAfter.sub(feeVaultBalanceBefore)).to.eq(fee);
|
||||
expect(feeVaultBalanceAfter - feeVaultBalanceBefore).to.eq(fee);
|
||||
|
||||
// use the same nonce to sign should fail
|
||||
await expect(
|
||||
@@ -386,7 +377,7 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
0,
|
||||
1000000,
|
||||
"0x",
|
||||
constants.MaxUint256,
|
||||
MaxUint256,
|
||||
signature,
|
||||
signer.address,
|
||||
{ value: fee }
|
||||
@@ -409,10 +400,10 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
0,
|
||||
1000000,
|
||||
"0x",
|
||||
constants.MaxUint256,
|
||||
MaxUint256,
|
||||
signature,
|
||||
signer.address,
|
||||
{ value: fee.add(100) }
|
||||
{ value: fee + 100n }
|
||||
)
|
||||
)
|
||||
.to.emit(queue, "QueueTransaction")
|
||||
@@ -420,8 +411,8 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
expect(await gateway.nonces(signer.address)).to.eq(1);
|
||||
const feeVaultBalanceAfter = await ethers.provider.getBalance(feeVault.address);
|
||||
const signerBalanceAfter = await ethers.provider.getBalance(signer.address);
|
||||
expect(feeVaultBalanceAfter.sub(feeVaultBalanceBefore)).to.eq(fee);
|
||||
expect(signerBalanceAfter.sub(signerBalanceBefore)).to.eq(100);
|
||||
expect(feeVaultBalanceAfter - feeVaultBalanceBefore).to.eq(fee);
|
||||
expect(signerBalanceAfter - signerBalanceBefore).to.eq(100n);
|
||||
|
||||
// use the same nonce to sign should fail
|
||||
await expect(
|
||||
@@ -433,10 +424,10 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
0,
|
||||
1000000,
|
||||
"0x",
|
||||
constants.MaxUint256,
|
||||
MaxUint256,
|
||||
signature,
|
||||
signer.address,
|
||||
{ value: fee.add(100) }
|
||||
{ value: fee + 100n }
|
||||
)
|
||||
).to.revertedWith("Incorrect signature");
|
||||
});
|
||||
@@ -453,10 +444,10 @@ describe("EnforcedTxGateway.spec", async () => {
|
||||
0,
|
||||
1000000,
|
||||
"0x1234",
|
||||
constants.MaxUint256,
|
||||
MaxUint256,
|
||||
signature,
|
||||
gateway.address,
|
||||
{ value: fee.add(100) }
|
||||
gateway.getAddress(),
|
||||
{ value: fee + 100n }
|
||||
)
|
||||
).to.revertedWith("Failed to refund the fee");
|
||||
});
|
||||
|
||||
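// --- Editor's note (sketch, not part of the diff): the changes in this and the following spec
// files follow the standard ethers v5 -> v6 migration patterns. A compact illustration, assuming
// a deployed contract `gateway`, a `tx`/`receipt` pair, and a bigint `fee` like the fixtures above:
import { MaxUint256, ZeroAddress, ZeroHash, Signature, parseEther } from "ethers";
import type { Contract, ContractTransactionResponse } from "ethers";

async function v5ToV6Examples(gateway: Contract, tx: ContractTransactionResponse, fee: bigint) {
  const paddedFee = fee + 100n;                       // v5: fee.add(100)         (BigNumber -> bigint)
  const tenPercent = (fee * 10n) / 100n;              // v5: fee.mul(10).div(100)
  const addr = await gateway.getAddress();            // v5: gateway.address
  const receipt = await tx.wait();                    // v6: wait() may resolve to null
  const spent = receipt!.gasUsed * receipt!.gasPrice; // v5: gasUsed.mul(effectiveGasPrice)
  const one = parseEther("1");                        // v5: ethers.utils.parseEther("1")
  return { paddedFee, tenPercent, addr, spent, one, MaxUint256, ZeroAddress, ZeroHash, Signature };
}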
@@ -1,9 +1,8 @@
|
||||
/* eslint-disable node/no-missing-import */
|
||||
/* eslint-disable node/no-unpublished-import */
|
||||
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers";
import { expect } from "chai";
import { BigNumber, BigNumberish, ContractTransaction, constants } from "ethers";
import { keccak256 } from "ethers/lib/utils";
import { BigNumberish, ContractTransactionResponse, MaxUint256, keccak256, toQuantity } from "ethers";
import { ethers, network } from "hardhat";

import {
@@ -24,31 +23,27 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L2_MESSAGE_QUEUE = "0x5300000000000000000000000000000000000000";
|
||||
const SCROLL_CHAIN = "0xa13BAF47339d63B743e7Da8741db5456DAc1E556";
|
||||
|
||||
let deployer: SignerWithAddress;
|
||||
let deployer: HardhatEthersSigner;
|
||||
|
||||
let proxyAdmin: ProxyAdmin;
|
||||
|
||||
const mockERC20Balance = async (tokenAddress: string, balance: BigNumber, slot: BigNumberish) => {
const mockERC20Balance = async (tokenAddress: string, balance: bigint, slot: BigNumberish) => {
const storageSlot = keccak256(
ethers.utils.defaultAbiCoder.encode(["address", "uint256"], [deployer.address, slot])
ethers.AbiCoder.defaultAbiCoder().encode(["address", "uint256"], [deployer.address, slot])
);
await ethers.provider.send("hardhat_setStorageAt", [
tokenAddress,
storageSlot,
ethers.utils.hexlify(ethers.utils.zeroPad(balance.toHexString(), 32)),
]);
await ethers.provider.send("hardhat_setStorageAt", [tokenAddress, storageSlot, toQuantity(balance)]);
const token = await ethers.getContractAt("MockERC20", tokenAddress, deployer);
expect(await token.balanceOf(deployer.address)).to.eq(balance);
};

const mockETHBalance = async (balance: BigNumber) => {
await network.provider.send("hardhat_setBalance", [deployer.address, balance.toHexString()]);
expect(await deployer.getBalance()).to.eq(balance);
const mockETHBalance = async (balance: bigint) => {
await network.provider.send("hardhat_setBalance", [deployer.address, toQuantity(balance)]);
expect(await ethers.provider.getBalance(deployer.address)).to.eq(balance);
};

const showGasUsage = async (tx: ContractTransaction, desc: string) => {
const showGasUsage = async (tx: ContractTransactionResponse, desc: string) => {
const receipt = await tx.wait();
console.log(`${desc}: GasUsed[${receipt.gasUsed}]`);
console.log(`${desc}: GasUsed[${receipt!.gasUsed}]`);
};

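// --- Editor's note (sketch, not part of the diff): why mockERC20Balance works. Solidity stores a
// mapping entry `balances[holder]` (mapping declared at storage slot `slot`) at
// keccak256(abi.encode(holder, slot)), which is the slot the helper overwrites via
// hardhat_setStorageAt; the slot index passed per token (0, 2, 3, 9, 51, ...) reflects each
// contract's own storage layout. Generic helper names below are illustrative.
import { AbiCoder, keccak256, toQuantity } from "ethers";
import { ethers } from "hardhat";

async function setMappingEntry(contractAddr: string, holder: string, mappingSlot: number, value: bigint) {
  // keccak256(abi.encode(key, declarationSlot)) is the storage slot of mapping[key].
  const slot = keccak256(AbiCoder.defaultAbiCoder().encode(["address", "uint256"], [holder, mappingSlot]));
  // toQuantity renders the bigint as the 0x-prefixed hex string the RPC method expects.
  await ethers.provider.send("hardhat_setStorageAt", [contractAddr, slot, toQuantity(value)]);
}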
context("L1 upgrade", async () => {
|
||||
@@ -59,7 +54,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
// fork network
|
||||
const provider = new ethers.providers.JsonRpcProvider("https://rpc.ankr.com/eth");
|
||||
const provider = new ethers.JsonRpcProvider("https://rpc.ankr.com/eth");
|
||||
if (!forkBlock) {
|
||||
forkBlock = (await provider.getBlockNumber()) - 10;
|
||||
}
|
||||
@@ -81,14 +76,14 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
|
||||
// mock eth balance
|
||||
deployer = await ethers.getSigner("0x1100000000000000000000000000000000000011");
|
||||
await mockETHBalance(ethers.utils.parseEther("1000"));
|
||||
await mockETHBalance(ethers.parseEther("1000"));
|
||||
|
||||
// mock owner of proxy admin
|
||||
proxyAdmin = await ethers.getContractAt("ProxyAdmin", "0xEB803eb3F501998126bf37bB823646Ed3D59d072", deployer);
|
||||
await ethers.provider.send("hardhat_setStorageAt", [
|
||||
proxyAdmin.address,
|
||||
await proxyAdmin.getAddress(),
|
||||
"0x0",
|
||||
ethers.utils.hexlify(ethers.utils.zeroPad(deployer.address, 32)),
|
||||
ethers.AbiCoder.defaultAbiCoder().encode(["address"], [deployer.address]),
|
||||
]);
|
||||
expect(await proxyAdmin.owner()).to.eq(deployer.address);
|
||||
|
||||
@@ -107,9 +102,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const ScrollChain = await ethers.getContractFactory("ScrollChain", deployer);
|
||||
await proxyAdmin.upgrade(
|
||||
L1_MESSENGER,
|
||||
(
|
||||
await L1ScrollMessenger.deploy(L2_MESSENGER, SCROLL_CHAIN, L1_MESSAGE_QUEUE)
|
||||
).address
|
||||
(await L1ScrollMessenger.deploy(L2_MESSENGER, SCROLL_CHAIN, L1_MESSAGE_QUEUE)).getAddress()
|
||||
);
|
||||
await proxyAdmin.upgrade(
|
||||
L1_MESSAGE_QUEUE,
|
||||
@@ -119,14 +112,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
SCROLL_CHAIN,
|
||||
"0x72CAcBcfDe2d1e19122F8A36a4d6676cd39d7A5d"
|
||||
)
|
||||
).address
|
||||
).getAddress()
|
||||
);
|
||||
await queue.initializeV2();
|
||||
await proxyAdmin.upgrade(
|
||||
SCROLL_CHAIN,
|
||||
(
|
||||
await ScrollChain.deploy(534352, L1_MESSAGE_QUEUE, "0xA2Ab526e5C5491F10FC05A55F064BF9F7CEf32a0")
|
||||
).address
|
||||
(await ScrollChain.deploy(534352, L1_MESSAGE_QUEUE, "0xA2Ab526e5C5491F10FC05A55F064BF9F7CEf32a0")).getAddress()
|
||||
);
|
||||
};
|
||||
|
||||
@@ -136,40 +127,40 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L1ETHGateway = await ethers.getContractFactory("L1ETHGateway", deployer);
|
||||
const impl = await L1ETHGateway.deploy(L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
|
||||
const gateway = await ethers.getContractAt("L1ETHGateway", L1_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseEther("1");
|
||||
const amountIn = ethers.parseEther("1");
|
||||
const fee = await queue.estimateCrossDomainMessageFee(1e6);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
await gateway["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
|
||||
await gateway["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn + fee }),
|
||||
"L1ETHGateway.depositETH before upgrade"
|
||||
);
|
||||
await showGasUsage(
|
||||
await router["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
|
||||
await router["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn + fee }),
|
||||
"L1GatewayRouter.depositETH before upgrade"
|
||||
);
|
||||
await showGasUsage(
|
||||
await messenger["sendMessage(address,uint256,bytes,uint256)"](deployer.address, amountIn, "0x", 1e6, {
|
||||
value: amountIn.add(fee),
|
||||
value: amountIn + fee,
|
||||
}),
|
||||
"L1ScrollMessenger.sendMessage before upgrade"
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL1(L1_GATEWAY, impl.address);
|
||||
await upgradeL1(L1_GATEWAY, await impl.getAddress());
|
||||
|
||||
// after upgrade
|
||||
await showGasUsage(
|
||||
await gateway["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
|
||||
await gateway["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn + fee }),
|
||||
"L1ETHGateway.depositETH after upgrade"
|
||||
);
|
||||
await showGasUsage(
|
||||
await router["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn.add(fee) }),
|
||||
await router["depositETH(uint256,uint256)"](amountIn, 1e6, { value: amountIn + fee }),
|
||||
"L1GatewayRouter.depositETH after upgrade"
|
||||
);
|
||||
await showGasUsage(
|
||||
await messenger["sendMessage(address,uint256,bytes,uint256)"](deployer.address, amountIn, "0x", 1e6, {
|
||||
value: amountIn.add(fee),
|
||||
value: amountIn + fee,
|
||||
}),
|
||||
"L1ScrollMessenger.sendMessage after upgrade"
|
||||
);
|
||||
@@ -183,12 +174,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L1WETHGateway = await ethers.getContractFactory("L1WETHGateway", deployer);
|
||||
const impl = await L1WETHGateway.deploy(L1_WETH, L2_WETH, L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
|
||||
const gateway = await ethers.getContractAt("L1WETHGateway", L1_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseEther("1");
|
||||
const amountIn = ethers.parseEther("1");
|
||||
const fee = await queue.estimateCrossDomainMessageFee(1e6);
|
||||
const token = await ethers.getContractAt("MockERC20", L1_WETH, deployer);
|
||||
await mockERC20Balance(token.address, amountIn.mul(10), 3);
|
||||
await token.approve(L1_GATEWAY, constants.MaxUint256);
|
||||
await token.approve(L1_ROUTER, constants.MaxUint256);
|
||||
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 3);
|
||||
await token.approve(L1_GATEWAY, MaxUint256);
|
||||
await token.approve(L1_ROUTER, MaxUint256);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -201,7 +192,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL1(L1_GATEWAY, impl.address);
|
||||
await upgradeL1(L1_GATEWAY, await impl.getAddress());
|
||||
|
||||
// after upgrade
|
||||
await showGasUsage(
|
||||
@@ -227,12 +218,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
"0x66e5312EDeEAef6e80759A0F789e7914Fb401484"
|
||||
);
|
||||
const gateway = await ethers.getContractAt("L1StandardERC20Gateway", L1_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseUnits("1", 6);
|
||||
const amountIn = ethers.parseUnits("1", 6);
|
||||
const fee = await queue.estimateCrossDomainMessageFee(1e6);
|
||||
const token = await ethers.getContractAt("MockERC20", L1_USDT, deployer);
|
||||
await mockERC20Balance(token.address, amountIn.mul(10), 2);
|
||||
await token.approve(L1_GATEWAY, constants.MaxUint256);
|
||||
await token.approve(L1_ROUTER, constants.MaxUint256);
|
||||
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 2);
|
||||
await token.approve(L1_GATEWAY, MaxUint256);
|
||||
await token.approve(L1_ROUTER, MaxUint256);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -245,7 +236,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL1(L1_GATEWAY, impl.address);
|
||||
await upgradeL1(L1_GATEWAY, await impl.getAddress());
|
||||
|
||||
// after upgrade
|
||||
await showGasUsage(
|
||||
@@ -265,12 +256,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L1CustomERC20Gateway = await ethers.getContractFactory("L1CustomERC20Gateway", deployer);
|
||||
const impl = await L1CustomERC20Gateway.deploy(L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
|
||||
const gateway = await ethers.getContractAt("L1CustomERC20Gateway", L1_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseUnits("1", 18);
|
||||
const amountIn = ethers.parseUnits("1", 18);
|
||||
const fee = await queue.estimateCrossDomainMessageFee(1e6);
|
||||
const token = await ethers.getContractAt("MockERC20", L1_DAI, deployer);
|
||||
await mockERC20Balance(token.address, amountIn.mul(10), 2);
|
||||
await token.approve(L1_GATEWAY, constants.MaxUint256);
|
||||
await token.approve(L1_ROUTER, constants.MaxUint256);
|
||||
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 2);
|
||||
await token.approve(L1_GATEWAY, MaxUint256);
|
||||
await token.approve(L1_ROUTER, MaxUint256);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -283,7 +274,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL1(L1_GATEWAY, impl.address);
|
||||
await upgradeL1(L1_GATEWAY, await impl.getAddress());
|
||||
|
||||
// after upgrade
|
||||
await showGasUsage(
|
||||
@@ -304,12 +295,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L1USDCGateway = await ethers.getContractFactory("L1USDCGateway", deployer);
|
||||
const impl = await L1USDCGateway.deploy(L1_USDC, L2_USDC, L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
|
||||
const gateway = await ethers.getContractAt("L1USDCGateway", L1_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseUnits("1", 6);
|
||||
const amountIn = ethers.parseUnits("1", 6);
|
||||
const fee = await queue.estimateCrossDomainMessageFee(1e6);
|
||||
const token = await ethers.getContractAt("MockERC20", L1_USDC, deployer);
|
||||
await mockERC20Balance(token.address, amountIn.mul(10), 9);
|
||||
await token.approve(L1_GATEWAY, constants.MaxUint256);
|
||||
await token.approve(L1_ROUTER, constants.MaxUint256);
|
||||
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 9);
|
||||
await token.approve(L1_GATEWAY, MaxUint256);
|
||||
await token.approve(L1_ROUTER, MaxUint256);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -322,7 +313,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL1(L1_GATEWAY, impl.address);
|
||||
await upgradeL1(L1_GATEWAY, await impl.getAddress());
|
||||
|
||||
// after upgrade
|
||||
await showGasUsage(
|
||||
@@ -343,12 +334,12 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L1LidoGateway = await ethers.getContractFactory("L1LidoGateway", deployer);
|
||||
const impl = await L1LidoGateway.deploy(L1_WSTETH, L2_WSTETH, L2_GATEWAY, L1_ROUTER, L1_MESSENGER);
|
||||
const gateway = await ethers.getContractAt("L1LidoGateway", L1_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseUnits("1", 6);
|
||||
const amountIn = ethers.parseUnits("1", 6);
|
||||
const fee = await queue.estimateCrossDomainMessageFee(1e6);
|
||||
const token = await ethers.getContractAt("MockERC20", L1_WSTETH, deployer);
|
||||
await mockERC20Balance(token.address, amountIn.mul(10), 0);
|
||||
await token.approve(L1_GATEWAY, constants.MaxUint256);
|
||||
await token.approve(L1_ROUTER, constants.MaxUint256);
|
||||
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 0);
|
||||
await token.approve(L1_GATEWAY, MaxUint256);
|
||||
await token.approve(L1_ROUTER, MaxUint256);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -361,7 +352,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL1(L1_GATEWAY, impl.address);
|
||||
await upgradeL1(L1_GATEWAY, await impl.getAddress());
|
||||
await gateway.initializeV2(deployer.address, deployer.address, deployer.address, deployer.address);
|
||||
|
||||
// after upgrade
|
||||
@@ -383,7 +374,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
// fork network
|
||||
const provider = new ethers.providers.JsonRpcProvider("https://rpc.scroll.io");
|
||||
const provider = new ethers.JsonRpcProvider("https://rpc.scroll.io");
|
||||
if (!forkBlock) {
|
||||
forkBlock = (await provider.getBlockNumber()) - 31;
|
||||
}
|
||||
@@ -405,14 +396,14 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
|
||||
// mock eth balance
|
||||
deployer = await ethers.getSigner("0x1100000000000000000000000000000000000011");
|
||||
await mockETHBalance(ethers.utils.parseEther("1000"));
|
||||
await mockETHBalance(ethers.parseEther("1000"));
|
||||
|
||||
// mock owner of proxy admin
|
||||
proxyAdmin = await ethers.getContractAt("ProxyAdmin", "0xA76acF000C890b0DD7AEEf57627d9899F955d026", deployer);
|
||||
await ethers.provider.send("hardhat_setStorageAt", [
|
||||
proxyAdmin.address,
|
||||
await proxyAdmin.getAddress(),
|
||||
"0x0",
|
||||
ethers.utils.hexlify(ethers.utils.zeroPad(deployer.address, 32)),
|
||||
ethers.AbiCoder.defaultAbiCoder().encode(["address"], [deployer.address]),
|
||||
]);
|
||||
expect(await proxyAdmin.owner()).to.eq(deployer.address);
|
||||
|
||||
@@ -423,7 +414,10 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const upgradeL2 = async (proxy: string, impl: string) => {
|
||||
await proxyAdmin.upgrade(proxy, impl);
|
||||
const L2ScrollMessenger = await ethers.getContractFactory("L2ScrollMessenger", deployer);
|
||||
await proxyAdmin.upgrade(L2_MESSENGER, (await L2ScrollMessenger.deploy(L1_MESSENGER, L2_MESSAGE_QUEUE)).address);
|
||||
await proxyAdmin.upgrade(
|
||||
L2_MESSENGER,
|
||||
(await L2ScrollMessenger.deploy(L1_MESSENGER, L2_MESSAGE_QUEUE)).getAddress()
|
||||
);
|
||||
};
|
||||
|
||||
it.skip("should succeed on L2ETHGateway", async () => {
|
||||
@@ -432,7 +426,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L2ETHGateway = await ethers.getContractFactory("L2ETHGateway", deployer);
|
||||
const impl = await L2ETHGateway.deploy(L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
|
||||
const gateway = await ethers.getContractAt("L2ETHGateway", L2_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseEther("1");
|
||||
const amountIn = ethers.parseEther("1");
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -451,7 +445,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL2(L2_GATEWAY, impl.address);
|
||||
await upgradeL2(L2_GATEWAY, await impl.getAddress());
|
||||
|
||||
// after upgrade
|
||||
await showGasUsage(
|
||||
@@ -478,11 +472,11 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L2WETHGateway = await ethers.getContractFactory("L2WETHGateway", deployer);
|
||||
const impl = await L2WETHGateway.deploy(L2_WETH, L1_WETH, L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
|
||||
const gateway = await ethers.getContractAt("L2WETHGateway", L2_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseEther("1");
|
||||
const amountIn = ethers.parseEther("1");
|
||||
const token = await ethers.getContractAt("MockERC20", L2_WETH, deployer);
|
||||
await mockERC20Balance(token.address, amountIn.mul(10), 0);
|
||||
await token.approve(L2_GATEWAY, constants.MaxUint256);
|
||||
await token.approve(L2_ROUTER, constants.MaxUint256);
|
||||
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 0);
|
||||
await token.approve(L2_GATEWAY, MaxUint256);
|
||||
await token.approve(L2_ROUTER, MaxUint256);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -495,7 +489,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL2(L2_GATEWAY, impl.address);
|
||||
await upgradeL2(L2_GATEWAY, await impl.getAddress());
|
||||
|
||||
// after upgrade
|
||||
await showGasUsage(
|
||||
@@ -520,11 +514,11 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
"0x66e5312EDeEAef6e80759A0F789e7914Fb401484"
|
||||
);
|
||||
const gateway = await ethers.getContractAt("L2StandardERC20Gateway", L2_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseUnits("1", 6);
|
||||
const amountIn = ethers.parseUnits("1", 6);
|
||||
const token = await ethers.getContractAt("MockERC20", L2_USDT, deployer);
|
||||
await mockERC20Balance(token.address, amountIn.mul(10), 51);
|
||||
await token.approve(L2_GATEWAY, constants.MaxUint256);
|
||||
await token.approve(L2_ROUTER, constants.MaxUint256);
|
||||
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 51);
|
||||
await token.approve(L2_GATEWAY, MaxUint256);
|
||||
await token.approve(L2_ROUTER, MaxUint256);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -537,7 +531,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL2(L2_GATEWAY, impl.address);
|
||||
await upgradeL2(L2_GATEWAY, await impl.getAddress());
|
||||
|
||||
// after upgrade
|
||||
await showGasUsage(
|
||||
@@ -557,11 +551,11 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L2CustomERC20Gateway = await ethers.getContractFactory("L2CustomERC20Gateway", deployer);
|
||||
const impl = await L2CustomERC20Gateway.deploy(L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
|
||||
const gateway = await ethers.getContractAt("L2CustomERC20Gateway", L2_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseUnits("1", 18);
|
||||
const amountIn = ethers.parseUnits("1", 18);
|
||||
const token = await ethers.getContractAt("MockERC20", L2_DAI, deployer);
|
||||
await mockERC20Balance(token.address, amountIn.mul(10), 51);
|
||||
await token.approve(L1_GATEWAY, constants.MaxUint256);
|
||||
await token.approve(L1_ROUTER, constants.MaxUint256);
|
||||
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 51);
|
||||
await token.approve(L1_GATEWAY, MaxUint256);
|
||||
await token.approve(L1_ROUTER, MaxUint256);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -574,7 +568,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL2(L2_GATEWAY, impl.address);
|
||||
await upgradeL2(L2_GATEWAY, await impl.getAddress());
|
||||
|
||||
// after upgrade
|
||||
await showGasUsage(
|
||||
@@ -595,11 +589,11 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L2USDCGateway = await ethers.getContractFactory("L2USDCGateway", deployer);
|
||||
const impl = await L2USDCGateway.deploy(L1_USDC, L2_USDC, L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
|
||||
const gateway = await ethers.getContractAt("L2USDCGateway", L2_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseUnits("1", 6);
|
||||
const amountIn = ethers.parseUnits("1", 6);
|
||||
const token = await ethers.getContractAt("MockERC20", L2_USDC, deployer);
|
||||
await mockERC20Balance(token.address, amountIn.mul(10), 9);
|
||||
await token.approve(L2_GATEWAY, constants.MaxUint256);
|
||||
await token.approve(L2_ROUTER, constants.MaxUint256);
|
||||
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 9);
|
||||
await token.approve(L2_GATEWAY, MaxUint256);
|
||||
await token.approve(L2_ROUTER, MaxUint256);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -612,7 +606,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL2(L2_GATEWAY, impl.address);
|
||||
await upgradeL2(L2_GATEWAY, await impl.getAddress());
|
||||
|
||||
// after upgrade
|
||||
await showGasUsage(
|
||||
@@ -633,11 +627,11 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
const L2LidoGateway = await ethers.getContractFactory("L2LidoGateway", deployer);
|
||||
const impl = await L2LidoGateway.deploy(L1_WSTETH, L2_WSTETH, L1_GATEWAY, L2_ROUTER, L2_MESSENGER);
|
||||
const gateway = await ethers.getContractAt("L2LidoGateway", L2_GATEWAY, deployer);
|
||||
const amountIn = ethers.utils.parseUnits("1", 6);
|
||||
const amountIn = ethers.parseUnits("1", 6);
|
||||
const token = await ethers.getContractAt("MockERC20", L2_WSTETH, deployer);
|
||||
await mockERC20Balance(token.address, amountIn.mul(10), 51);
|
||||
await token.approve(L2_GATEWAY, constants.MaxUint256);
|
||||
await token.approve(L2_ROUTER, constants.MaxUint256);
|
||||
await mockERC20Balance(await token.getAddress(), amountIn * 10n, 51);
|
||||
await token.approve(L2_GATEWAY, MaxUint256);
|
||||
await token.approve(L2_ROUTER, MaxUint256);
|
||||
|
||||
// before upgrade
|
||||
await showGasUsage(
|
||||
@@ -650,7 +644,7 @@ describe("GasOptimizationUpgrade.spec", async () => {
|
||||
);
|
||||
|
||||
// do upgrade
|
||||
await upgradeL2(L2_GATEWAY, impl.address);
|
||||
await upgradeL2(L2_GATEWAY, await impl.getAddress());
|
||||
await gateway.initializeV2(deployer.address, deployer.address, deployer.address, deployer.address);
|
||||
|
||||
// after upgrade
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
/* eslint-disable node/no-unpublished-import */
|
||||
/* eslint-disable node/no-missing-import */
|
||||
import { ethers } from "hardhat";
|
||||
import { GasSwap, ERC2771Forwarder, MockERC20, MockGasSwapTarget } from "../typechain";
|
||||
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
|
||||
import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers";
|
||||
import { expect } from "chai";
|
||||
import { BigNumber, constants } from "ethers";
|
||||
import { splitSignature } from "ethers/lib/utils";
|
||||
import { MaxUint256, Signature, ZeroAddress, ZeroHash, toBigInt } from "ethers";
|
||||
import { ethers } from "hardhat";
|
||||
|
||||
import { GasSwap, ERC2771Forwarder, MockERC20, MockGasSwapTarget } from "../typechain";
|
||||
|
||||
describe("GasSwap.spec", async () => {
|
||||
let deployer: SignerWithAddress;
|
||||
let signer: SignerWithAddress;
|
||||
let deployer: HardhatEthersSigner;
|
||||
let signer: HardhatEthersSigner;
|
||||
|
||||
let forwarder: ERC2771Forwarder;
|
||||
let swap: GasSwap;
|
||||
@@ -21,19 +21,15 @@ describe("GasSwap.spec", async () => {
|
||||
|
||||
const ERC2771Forwarder = await ethers.getContractFactory("ERC2771Forwarder", deployer);
|
||||
forwarder = await ERC2771Forwarder.deploy("ERC2771Forwarder");
|
||||
await forwarder.deployed();
|
||||
|
||||
const GasSwap = await ethers.getContractFactory("GasSwap", deployer);
|
||||
swap = await GasSwap.deploy(forwarder.address);
|
||||
await swap.deployed();
|
||||
swap = await GasSwap.deploy(forwarder.getAddress());
|
||||
|
||||
const MockGasSwapTarget = await ethers.getContractFactory("MockGasSwapTarget", deployer);
|
||||
target = await MockGasSwapTarget.deploy();
|
||||
await target.deployed();
|
||||
|
||||
const MockERC20 = await ethers.getContractFactory("MockERC20", deployer);
|
||||
token = await MockERC20.deploy("x", "y", 18);
|
||||
await token.deployed();
|
||||
});
|
||||
|
||||
context("auth", async () => {
|
||||
@@ -43,11 +39,11 @@ describe("GasSwap.spec", async () => {
|
||||
|
||||
context("#updateFeeRatio", async () => {
|
||||
it("should revert, when non-owner call", async () => {
|
||||
await expect(swap.connect(signer).updateFeeRatio(1)).to.revertedWith("caller is not the owner");
|
||||
await expect(swap.connect(signer).updateFeeRatio(1)).to.revertedWith("Ownable: caller is not the owner");
|
||||
});
|
||||
|
||||
it("should succeed", async () => {
|
||||
expect(await swap.feeRatio()).to.eq(constants.AddressZero);
|
||||
expect(await swap.feeRatio()).to.eq(ZeroAddress);
|
||||
await expect(swap.updateFeeRatio(100)).to.emit(swap, "UpdateFeeRatio").withArgs(100);
|
||||
expect(await swap.feeRatio()).to.eq(100);
|
||||
});
|
||||
@@ -55,66 +51,62 @@ describe("GasSwap.spec", async () => {
|
||||
|
||||
context("#updateApprovedTarget", async () => {
|
||||
it("should revert, when non-owner call", async () => {
|
||||
await expect(swap.connect(signer).updateApprovedTarget(target.address, false)).to.revertedWith(
|
||||
"caller is not the owner"
|
||||
await expect(swap.connect(signer).updateApprovedTarget(target.getAddress(), false)).to.revertedWith(
|
||||
"Ownable: caller is not the owner"
|
||||
);
|
||||
});
|
||||
|
||||
it("should succeed", async () => {
|
||||
expect(await swap.approvedTargets(target.address)).to.eq(false);
|
||||
await expect(swap.updateApprovedTarget(target.address, true))
|
||||
expect(await swap.approvedTargets(target.getAddress())).to.eq(false);
|
||||
await expect(swap.updateApprovedTarget(target.getAddress(), true))
|
||||
.to.emit(swap, "UpdateApprovedTarget")
|
||||
.withArgs(target.address, true);
|
||||
expect(await swap.approvedTargets(target.address)).to.eq(true);
|
||||
await expect(swap.updateApprovedTarget(target.address, false))
|
||||
.withArgs(await target.getAddress(), true);
|
||||
expect(await swap.approvedTargets(target.getAddress())).to.eq(true);
|
||||
await expect(swap.updateApprovedTarget(target.getAddress(), false))
|
||||
.to.emit(swap, "UpdateApprovedTarget")
|
||||
.withArgs(target.address, false);
|
||||
expect(await swap.approvedTargets(target.address)).to.eq(false);
|
||||
.withArgs(await target.getAddress(), false);
|
||||
expect(await swap.approvedTargets(target.getAddress())).to.eq(false);
|
||||
});
|
||||
});
|
||||
|
||||
context("#withdraw", async () => {
|
||||
it("should revert, when non-owner call", async () => {
|
||||
await expect(swap.connect(signer).withdraw(constants.AddressZero, 0)).to.revertedWith(
|
||||
"caller is not the owner"
|
||||
);
|
||||
await expect(swap.connect(signer).withdraw(ZeroAddress, 0)).to.revertedWith("Ownable: caller is not the owner");
|
||||
});
|
||||
|
||||
it("should succeed, when withdraw ETH", async () => {
|
||||
await deployer.sendTransaction({ to: swap.address, value: ethers.utils.parseEther("1") });
|
||||
const balanceBefore = await deployer.getBalance();
|
||||
const tx = await swap.withdraw(constants.AddressZero, ethers.utils.parseEther("1"));
|
||||
await deployer.sendTransaction({ to: swap.getAddress(), value: ethers.parseEther("1") });
|
||||
const balanceBefore = await ethers.provider.getBalance(deployer.address);
|
||||
const tx = await swap.withdraw(ZeroAddress, ethers.parseEther("1"));
|
||||
const receipt = await tx.wait();
|
||||
const balanceAfter = await deployer.getBalance();
|
||||
expect(balanceAfter.sub(balanceBefore)).to.eq(
|
||||
ethers.utils.parseEther("1").sub(receipt.gasUsed.mul(receipt.effectiveGasPrice))
|
||||
);
|
||||
const balanceAfter = await ethers.provider.getBalance(deployer.address);
|
||||
expect(balanceAfter - balanceBefore).to.eq(ethers.parseEther("1") - receipt!.gasUsed * receipt!.gasPrice);
|
||||
});
|
||||
|
||||
it("should succeed, when withdraw token", async () => {
|
||||
await token.mint(swap.address, ethers.utils.parseEther("1"));
|
||||
await token.mint(swap.getAddress(), ethers.parseEther("1"));
|
||||
const balanceBefore = await token.balanceOf(deployer.address);
|
||||
await swap.withdraw(token.address, ethers.utils.parseEther("1"));
|
||||
await swap.withdraw(token.getAddress(), ethers.parseEther("1"));
|
||||
const balanceAfter = await token.balanceOf(deployer.address);
|
||||
expect(balanceAfter.sub(balanceBefore)).to.eq(ethers.utils.parseEther("1"));
|
||||
expect(balanceAfter - balanceBefore).to.eq(ethers.parseEther("1"));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
const permit = async (amount: BigNumber) => {
const permit = async (amount: bigint) => {
const value = {
owner: signer.address,
spender: swap.address,
spender: await swap.getAddress(),
value: amount,
nonce: await token.nonces(signer.address),
deadline: constants.MaxUint256,
deadline: MaxUint256,
};

const domain = {
name: await token.name(),
version: "1",
chainId: (await ethers.provider.getNetwork()).chainId,
verifyingContract: token.address,
verifyingContract: await token.getAddress(),
};

const types = {
@@ -142,7 +134,7 @@ describe("GasSwap.spec", async () => {
],
};

const signature = splitSignature(await signer._signTypedData(domain, types, value));
const signature = Signature.from(await signer.signTypedData(domain, types, value));
return signature;
};

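// --- Editor's note (sketch, not part of the diff): ethers v6 drops the v5 `splitSignature`
// helper; `Signature.from` parses the flat 65-byte signature returned by `signTypedData` into
// the r/s/v components that the permit-style `swap` call above consumes.
import { Signature } from "ethers";

function toRsv(flatSignature: string): { r: string; s: string; v: number } {
  const sig = Signature.from(flatSignature);
  return { r: sig.r, s: sig.s, v: sig.v };
}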
@@ -151,15 +143,15 @@ describe("GasSwap.spec", async () => {
|
||||
await expect(
|
||||
swap.swap(
|
||||
{
|
||||
token: token.address,
|
||||
token: token.getAddress(),
|
||||
value: 0,
|
||||
deadline: 0,
|
||||
r: constants.HashZero,
|
||||
s: constants.HashZero,
|
||||
r: ZeroHash,
|
||||
s: ZeroHash,
|
||||
v: 0,
|
||||
},
|
||||
{
|
||||
target: target.address,
|
||||
target: target.getAddress(),
|
||||
data: "0x",
|
||||
minOutput: 0,
|
||||
}
|
||||
@@ -168,121 +160,119 @@ describe("GasSwap.spec", async () => {
|
||||
});
|
||||
|
||||
it("should revert, when insufficient output amount", async () => {
|
||||
const amountIn = ethers.utils.parseEther("1");
|
||||
const amountOut = ethers.utils.parseEther("2");
|
||||
const amountIn = ethers.parseEther("1");
|
||||
const amountOut = ethers.parseEther("2");
|
||||
await token.mint(signer.address, amountIn);
|
||||
await deployer.sendTransaction({ to: target.address, value: amountOut });
|
||||
await deployer.sendTransaction({ to: target.getAddress(), value: amountOut });
|
||||
const signature = await permit(amountIn);
|
||||
|
||||
await target.setToken(token.address);
|
||||
await target.setToken(token.getAddress());
|
||||
await target.setAmountIn(amountIn);
|
||||
|
||||
await swap.updateApprovedTarget(target.address, true);
|
||||
await swap.updateApprovedTarget(target.getAddress(), true);
|
||||
await expect(
|
||||
swap.connect(signer).swap(
|
||||
{
|
||||
token: token.address,
|
||||
token: await token.getAddress(),
|
||||
value: amountIn,
|
||||
deadline: constants.MaxUint256,
|
||||
deadline: MaxUint256,
|
||||
r: signature.r,
|
||||
s: signature.s,
|
||||
v: signature.v,
|
||||
},
|
||||
{
|
||||
target: target.address,
|
||||
target: target.getAddress(),
|
||||
data: "0x8119c065",
|
||||
minOutput: amountOut.add(1),
|
||||
minOutput: amountOut + 1n,
|
||||
}
|
||||
)
|
||||
).to.revertedWith("insufficient output amount");
|
||||
});
|
||||
|
||||
for (const refundRatio of ["0", "1", "5"]) {
|
||||
for (const refundRatio of [0n, 1n, 5n]) {
|
||||
for (const feeRatio of ["0", "5", "50"]) {
|
||||
it(`should succeed, when swap by signer directly, with feeRatio[${feeRatio}%] refundRatio[${refundRatio}%]`, async () => {
|
||||
const amountIn = ethers.utils.parseEther("1");
|
||||
const amountOut = ethers.utils.parseEther("2");
|
||||
const amountIn = ethers.parseEther("1");
|
||||
const amountOut = ethers.parseEther("2");
|
||||
await token.mint(signer.address, amountIn);
|
||||
await deployer.sendTransaction({ to: target.address, value: amountOut });
|
||||
await deployer.sendTransaction({ to: target.getAddress(), value: amountOut });
|
||||
const signature = await permit(amountIn);
|
||||
|
||||
await target.setToken(token.address);
|
||||
await target.setToken(token.getAddress());
|
||||
await target.setAmountIn(amountIn);
|
||||
await target.setRefund(amountIn.mul(refundRatio).div(100));
|
||||
await target.setRefund((amountIn * refundRatio) / 100n);
|
||||
|
||||
await swap.updateApprovedTarget(target.address, true);
|
||||
await swap.updateFeeRatio(ethers.utils.parseEther(feeRatio).div(100));
|
||||
const fee = amountOut.mul(feeRatio).div(100);
|
||||
await swap.updateApprovedTarget(target.getAddress(), true);
|
||||
await swap.updateFeeRatio(ethers.parseEther(feeRatio) / 100n);
|
||||
const fee = (amountOut * toBigInt(feeRatio)) / 100n;
|
||||
|
||||
const balanceBefore = await signer.getBalance();
|
||||
const balanceBefore = await ethers.provider.getBalance(signer.address);
|
||||
const tx = await swap.connect(signer).swap(
|
||||
{
|
||||
token: token.address,
|
||||
token: await token.getAddress(),
|
||||
value: amountIn,
|
||||
deadline: constants.MaxUint256,
|
||||
deadline: MaxUint256,
|
||||
r: signature.r,
|
||||
s: signature.s,
|
||||
v: signature.v,
|
||||
},
|
||||
{
|
||||
target: target.address,
|
||||
target: target.getAddress(),
|
||||
data: "0x8119c065",
|
||||
minOutput: amountOut.sub(fee),
|
||||
minOutput: amountOut - fee,
|
||||
}
|
||||
);
|
||||
const receipt = await tx.wait();
|
||||
const balanceAfter = await signer.getBalance();
|
||||
expect(balanceAfter.sub(balanceBefore)).to.eq(
|
||||
amountOut.sub(fee).sub(receipt.gasUsed.mul(receipt.effectiveGasPrice))
|
||||
);
|
||||
expect(await token.balanceOf(signer.address)).to.eq(amountIn.mul(refundRatio).div(100));
|
||||
const balanceAfter = await ethers.provider.getBalance(signer.address);
|
||||
expect(balanceAfter - balanceBefore).to.eq(amountOut - fee - receipt!.gasUsed * receipt!.gasPrice);
|
||||
expect(await token.balanceOf(signer.address)).to.eq((amountIn * refundRatio) / 100n);
|
||||
});
|
||||
|
||||
it(`should succeed, when swap by signer with forwarder, with feeRatio[${feeRatio}%] refundRatio[${refundRatio}%]`, async () => {
|
||||
const amountIn = ethers.utils.parseEther("1");
|
||||
const amountOut = ethers.utils.parseEther("2");
|
||||
const amountIn = ethers.parseEther("1");
|
||||
const amountOut = ethers.parseEther("2");
|
||||
await token.mint(signer.address, amountIn);
|
||||
await deployer.sendTransaction({ to: target.address, value: amountOut });
|
||||
await deployer.sendTransaction({ to: await target.getAddress(), value: amountOut });
|
||||
const permitSignature = await permit(amountIn);
|
||||
|
||||
await target.setToken(token.address);
|
||||
await target.setToken(token.getAddress());
|
||||
await target.setAmountIn(amountIn);
|
||||
await target.setRefund(amountIn.mul(refundRatio).div(100));
|
||||
await target.setRefund((amountIn * refundRatio) / 100n);
|
||||
|
||||
await swap.updateApprovedTarget(target.address, true);
|
||||
await swap.updateFeeRatio(ethers.utils.parseEther(feeRatio).div(100));
|
||||
const fee = amountOut.mul(feeRatio).div(100);
|
||||
await swap.updateApprovedTarget(target.getAddress(), true);
|
||||
await swap.updateFeeRatio(ethers.parseEther(feeRatio) / 100n);
|
||||
const fee = (amountOut * toBigInt(feeRatio)) / 100n;
|
||||
|
||||
const reqWithoutSignature = {
|
||||
from: signer.address,
|
||||
to: swap.address,
|
||||
value: constants.Zero,
|
||||
to: await swap.getAddress(),
|
||||
value: 0n,
|
||||
gas: 1000000,
|
||||
nonce: await forwarder.nonces(signer.address),
|
||||
deadline: 2000000000,
|
||||
data: swap.interface.encodeFunctionData("swap", [
|
||||
{
|
||||
token: token.address,
|
||||
token: await token.getAddress(),
|
||||
value: amountIn,
|
||||
deadline: constants.MaxUint256,
|
||||
deadline: MaxUint256,
|
||||
r: permitSignature.r,
|
||||
s: permitSignature.s,
|
||||
v: permitSignature.v,
|
||||
},
|
||||
{
|
||||
target: target.address,
|
||||
target: await target.getAddress(),
|
||||
data: "0x8119c065",
|
||||
minOutput: amountOut.sub(fee),
|
||||
minOutput: amountOut - fee,
|
||||
},
|
||||
]),
|
||||
};
|
||||
|
||||
const signature = await signer._signTypedData(
|
||||
const signature = await signer.signTypedData(
|
||||
{
|
||||
name: "ERC2771Forwarder",
|
||||
version: "1",
|
||||
chainId: (await ethers.provider.getNetwork()).chainId,
|
||||
verifyingContract: forwarder.address,
|
||||
verifyingContract: await forwarder.getAddress(),
|
||||
},
|
||||
{
|
||||
ForwardRequest: [
|
||||
@@ -319,7 +309,7 @@ describe("GasSwap.spec", async () => {
|
||||
reqWithoutSignature
|
||||
);
|
||||
|
||||
const balanceBefore = await signer.getBalance();
|
||||
const balanceBefore = await ethers.provider.getBalance(signer.address);
|
||||
await forwarder.execute({
|
||||
from: reqWithoutSignature.from,
|
||||
to: reqWithoutSignature.to,
|
||||
@@ -329,9 +319,9 @@ describe("GasSwap.spec", async () => {
|
||||
data: reqWithoutSignature.data,
|
||||
signature,
|
||||
});
|
||||
const balanceAfter = await signer.getBalance();
|
||||
expect(balanceAfter.sub(balanceBefore)).to.eq(amountOut.sub(fee));
|
||||
expect(await token.balanceOf(signer.address)).to.eq(amountIn.mul(refundRatio).div(100));
|
||||
const balanceAfter = await ethers.provider.getBalance(signer.address);
|
||||
expect(balanceAfter - balanceBefore).to.eq(amountOut - fee);
|
||||
expect(await token.balanceOf(signer.address)).to.eq((amountIn * refundRatio) / 100n);
|
||||
});
|
||||
}
|
||||
}
|
||||
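// --- Editor's sketch (not part of the diff): the forwarder path exercised above is the
// OpenZeppelin ERC2771Forwarder flow — the user signs a ForwardRequest off-chain with EIP-712
// and any relayer submits it via execute(), with GasSwap recovering the original sender from
// the appended calldata. Field names mirror the request object built in the test; the exact
// struct definition is elided there, so treat the `types` below as an assumption.
import { ethers } from "hardhat";

async function relayViaForwarder(forwarder: any, signer: any, to: string, data: string) {
  const request = {
    from: signer.address,
    to,
    value: 0n,
    gas: 1000000,
    nonce: await forwarder.nonces(signer.address),
    deadline: 2000000000,
    data,
  };
  const domain = {
    name: "ERC2771Forwarder",
    version: "1",
    chainId: (await ethers.provider.getNetwork()).chainId,
    verifyingContract: await forwarder.getAddress(),
  };
  const types = {
    ForwardRequest: [
      { name: "from", type: "address" },
      { name: "to", type: "address" },
      { name: "value", type: "uint256" },
      { name: "gas", type: "uint256" },
      { name: "nonce", type: "uint256" },
      { name: "deadline", type: "uint48" },
      { name: "data", type: "bytes" },
    ],
  };
  const signature = await signer.signTypedData(domain, types, request);
  // The relayer (any account) submits the signed request on the user's behalf.
  return forwarder.execute({
    from: request.from,
    to: request.to,
    value: request.value,
    gas: request.gas,
    deadline: request.deadline,
    data: request.data,
    signature,
  });
}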
|
||||
@@ -1,9 +1,9 @@
|
||||
/* eslint-disable node/no-unpublished-import */
|
||||
/* eslint-disable node/no-missing-import */
|
||||
import { expect } from "chai";
|
||||
import { BigNumber, BigNumberish, constants } from "ethers";
|
||||
import { concat, RLP } from "ethers/lib/utils";
|
||||
import { BigNumberish, ZeroHash, concat, encodeRlp, toBeHex, toBigInt } from "ethers";
|
||||
import { ethers } from "hardhat";
|
||||
|
||||
import { L1BlockContainer } from "../typechain";
|
||||
|
||||
interface IImportTestConfig {
|
||||
@@ -90,7 +90,7 @@ const testcases: Array<IImportTestConfig> = [
|
||||
];
|
||||
|
||||
function encodeHeader(test: IImportTestConfig): string {
return RLP.encode([
return encodeRlp([
test.parentHash,
test.uncleHash,
test.coinbase,
@@ -98,15 +98,15 @@ function encodeHeader(test: IImportTestConfig): string {
test.transactionsRoot,
test.receiptsRoot,
test.logsBloom,
BigNumber.from(test.difficulty).isZero() ? "0x" : BigNumber.from(test.difficulty).toHexString(),
BigNumber.from(test.blockHeight).toHexString(),
BigNumber.from(test.gasLimit).toHexString(),
BigNumber.from(test.gasUsed).toHexString(),
BigNumber.from(test.blockTimestamp).toHexString(),
toBigInt(test.difficulty) === 0n ? "0x" : toBeHex(test.difficulty),
toBeHex(test.blockHeight),
toBeHex(test.gasLimit),
toBeHex(test.gasUsed),
toBeHex(test.blockTimestamp),
test.extraData,
test.mixHash,
test.blockNonce,
BigNumber.from(test.baseFee).toHexString(),
toBeHex(test.baseFee),
]);
}

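// --- Editor's note (sketch, not part of the diff): encodeHeader rebuilds the canonical header
// encoding, and an Ethereum block hash is keccak256 of that RLP. RLP represents integers as
// minimal big-endian bytes with zero as the empty byte string, which is why the difficulty uses
// the `=== 0n ? "0x" : toBeHex(...)` guard (toBeHex(0) would give "0x00", a different encoding).
import { encodeRlp, keccak256, toBeHex } from "ethers";

function rlpQuantity(x: bigint): string {
  return x === 0n ? "0x" : toBeHex(x); // minimal-length hex, empty string for zero
}

function headerHash(rlpReadyFields: string[]): string {
  return keccak256(encodeRlp(rlpReadyFields)); // fields must already be RLP-ready hex strings
}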
@@ -124,7 +124,7 @@ describe("L1BlockContainer", async () => {
|
||||
const whitelist = await Whitelist.deploy(deployer.address);
|
||||
await whitelist.updateWhitelistStatus([deployer.address], true);
|
||||
|
||||
await container.updateWhitelist(whitelist.address);
|
||||
await container.updateWhitelist(whitelist.getAddress());
|
||||
});
|
||||
|
||||
it("should revert, when sender not allowed", async () => {
|
||||
@@ -137,7 +137,7 @@ describe("L1BlockContainer", async () => {
|
||||
test.stateRoot
|
||||
);
|
||||
|
||||
await expect(container.connect(signer).importBlockHeader(constants.HashZero, [], false)).to.revertedWith(
|
||||
await expect(container.connect(signer).importBlockHeader(ZeroHash, "0x", false)).to.revertedWith(
|
||||
"Not whitelisted sender"
|
||||
);
|
||||
});
|
||||
@@ -172,7 +172,7 @@ describe("L1BlockContainer", async () => {
|
||||
|
||||
it("should revert, when parent not imported", async () => {
|
||||
await container.initialize(
|
||||
constants.HashZero,
|
||||
ZeroHash,
|
||||
test.blockHeight - 1,
|
||||
test.blockTimestamp - 1,
|
||||
test.baseFee,
|
||||
|
||||
@@ -1,18 +1,29 @@
|
||||
/* eslint-disable node/no-unpublished-import */
|
||||
/* eslint-disable node/no-missing-import */
|
||||
import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers";
|
||||
import { expect } from "chai";
|
||||
import { BigNumber, constants } from "ethers";
|
||||
import { concat, getAddress, hexlify, keccak256, randomBytes, RLP, stripZeros } from "ethers/lib/utils";
|
||||
import { ethers } from "hardhat";
|
||||
|
||||
import { L1MessageQueue, L2GasPriceOracle } from "../typechain";
|
||||
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
|
||||
import {
|
||||
MaxUint256,
|
||||
ZeroAddress,
|
||||
concat,
|
||||
encodeRlp,
|
||||
getAddress,
|
||||
hexlify,
|
||||
keccak256,
|
||||
randomBytes,
|
||||
toBeHex,
|
||||
toBigInt,
|
||||
} from "ethers";
|
||||
|
||||
describe("L1MessageQueue", async () => {
|
||||
let deployer: SignerWithAddress;
|
||||
let scrollChain: SignerWithAddress;
|
||||
let messenger: SignerWithAddress;
|
||||
let gateway: SignerWithAddress;
|
||||
let signer: SignerWithAddress;
|
||||
let deployer: HardhatEthersSigner;
|
||||
let scrollChain: HardhatEthersSigner;
|
||||
let messenger: HardhatEthersSigner;
|
||||
let gateway: HardhatEthersSigner;
|
||||
let signer: HardhatEthersSigner;
|
||||
|
||||
let oracle: L2GasPriceOracle;
|
||||
let queue: L1MessageQueue;
|
||||
@@ -21,10 +32,8 @@ describe("L1MessageQueue", async () => {
|
||||
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
|
||||
const Factory = await ethers.getContractFactory(name, deployer);
|
||||
const impl = args.length > 0 ? await Factory.deploy(...args) : await Factory.deploy();
|
||||
await impl.deployed();
|
||||
const proxy = await TransparentUpgradeableProxy.deploy(impl.address, admin, "0x");
|
||||
await proxy.deployed();
|
||||
return proxy.address;
|
||||
const proxy = await TransparentUpgradeableProxy.deploy(impl.getAddress(), admin, "0x");
|
||||
return proxy.getAddress();
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
@@ -32,22 +41,25 @@ describe("L1MessageQueue", async () => {
|
||||
|
||||
const ProxyAdmin = await ethers.getContractFactory("ProxyAdmin", deployer);
|
||||
const admin = await ProxyAdmin.deploy();
|
||||
await admin.deployed();
|
||||
|
||||
queue = await ethers.getContractAt(
|
||||
"L1MessageQueue",
|
||||
await deployProxy("L1MessageQueue", admin.address, [messenger.address, scrollChain.address, gateway.address]),
|
||||
await deployProxy("L1MessageQueue", await admin.getAddress(), [
|
||||
messenger.address,
|
||||
scrollChain.address,
|
||||
gateway.address,
|
||||
]),
|
||||
deployer
|
||||
);
|
||||
|
||||
oracle = await ethers.getContractAt(
|
||||
"L2GasPriceOracle",
|
||||
await deployProxy("L2GasPriceOracle", admin.address, []),
|
||||
await deployProxy("L2GasPriceOracle", await admin.getAddress(), []),
|
||||
deployer
|
||||
);
|
||||
|
||||
await oracle.initialize(21000, 50000, 8, 16);
|
||||
await queue.initialize(messenger.address, scrollChain.address, constants.AddressZero, oracle.address, 10000000);
|
||||
await queue.initialize(messenger.address, scrollChain.address, ZeroAddress, oracle.getAddress(), 10000000);
|
||||
});
|
||||
|
||||
context("auth", async () => {
|
||||
@@ -56,28 +68,28 @@ describe("L1MessageQueue", async () => {
|
||||
expect(await queue.messenger()).to.eq(messenger.address);
|
||||
expect(await queue.scrollChain()).to.eq(scrollChain.address);
|
||||
expect(await queue.enforcedTxGateway()).to.eq(gateway.address);
|
||||
expect(await queue.gasOracle()).to.eq(oracle.address);
|
||||
expect(await queue.gasOracle()).to.eq(await oracle.getAddress());
|
||||
expect(await queue.maxGasLimit()).to.eq(10000000);
|
||||
});
|
||||
|
||||
it("should revert, when initialize again", async () => {
|
||||
await expect(
|
||||
queue.initialize(constants.AddressZero, constants.AddressZero, constants.AddressZero, constants.AddressZero, 0)
|
||||
).to.revertedWith("Initializable: contract is already initialized");
|
||||
await expect(queue.initialize(ZeroAddress, ZeroAddress, ZeroAddress, ZeroAddress, 0)).to.revertedWith(
|
||||
"Initializable: contract is already initialized"
|
||||
);
|
||||
});
|
||||
|
||||
context("#updateGasOracle", async () => {
|
||||
it("should revert, when non-owner call", async () => {
|
||||
await expect(queue.connect(signer).updateGasOracle(constants.AddressZero)).to.revertedWith(
|
||||
await expect(queue.connect(signer).updateGasOracle(ZeroAddress)).to.revertedWith(
|
||||
"Ownable: caller is not the owner"
|
||||
);
|
||||
});
|
||||
|
||||
it("should succeed", async () => {
|
||||
expect(await queue.gasOracle()).to.eq(oracle.address);
|
||||
expect(await queue.gasOracle()).to.eq(await oracle.getAddress());
|
||||
await expect(queue.updateGasOracle(deployer.address))
|
||||
.to.emit(queue, "UpdateGasOracle")
|
||||
.withArgs(oracle.address, deployer.address);
|
||||
.withArgs(await oracle.getAddress(), deployer.address);
|
||||
expect(await queue.gasOracle()).to.eq(deployer.address);
|
||||
});
|
||||
});
|
||||
@@ -101,30 +113,9 @@ describe("L1MessageQueue", async () => {
|
||||
const target = "0xcb18150e4efefb6786130e289a5f61a82a5b86d7";
|
||||
const transactionType = "0x7E";
|
||||
|
||||
for (const nonce of [
|
||||
BigNumber.from(0),
|
||||
BigNumber.from(1),
|
||||
BigNumber.from(127),
|
||||
BigNumber.from(128),
|
||||
BigNumber.from(22334455),
|
||||
constants.MaxUint256,
|
||||
]) {
|
||||
for (const value of [
|
||||
BigNumber.from(0),
|
||||
BigNumber.from(1),
|
||||
BigNumber.from(127),
|
||||
BigNumber.from(128),
|
||||
BigNumber.from(22334455),
|
||||
constants.MaxUint256,
|
||||
]) {
|
||||
for (const gasLimit of [
|
||||
BigNumber.from(0),
|
||||
BigNumber.from(1),
|
||||
BigNumber.from(127),
|
||||
BigNumber.from(128),
|
||||
BigNumber.from(22334455),
|
||||
constants.MaxUint256,
|
||||
]) {
|
||||
for (const nonce of [0n, 1n, 127n, 128n, 22334455n, MaxUint256]) {
|
||||
for (const value of [0n, 1n, 127n, 128n, 22334455n, MaxUint256]) {
|
||||
for (const gasLimit of [0n, 1n, 127n, 128n, 22334455n, MaxUint256]) {
|
||||
for (const dataLen of [0, 1, 2, 3, 4, 55, 56, 100]) {
|
||||
const tests = [randomBytes(dataLen)];
|
||||
if (dataLen === 1) {
|
||||
@@ -133,11 +124,11 @@ describe("L1MessageQueue", async () => {
|
||||
}
|
||||
}
|
||||
for (const data of tests) {
const transactionPayload = RLP.encode([
stripZeros(nonce.toHexString()),
stripZeros(gasLimit.toHexString()),
const transactionPayload = encodeRlp([
nonce === 0n ? "0x" : toBeHex(nonce),
gasLimit === 0n ? "0x" : toBeHex(gasLimit),
target,
stripZeros(value.toHexString()),
value === 0n ? "0x" : toBeHex(value),
data,
sender,
]);
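// --- Editor's sketch (not part of the diff): the assertion that follows (elided by the hunk)
// presumably hashes the payload the way EIP-2718 typed transactions do — keccak256 of the type
// byte (0x7E, declared above for Scroll L1 messages) concatenated with the RLP payload — and
// compares it with queue.computeTransactionHash(...). Treat the exact expectation as an
// assumption; only the payload construction is visible here.
import { concat, keccak256 } from "ethers";

function l1MessageHash(transactionPayload: string, transactionType = "0x7E"): string {
  return keccak256(concat([transactionType, transactionPayload]));
}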
@@ -159,30 +150,27 @@ describe("L1MessageQueue", async () => {
|
||||
|
||||
context("#appendCrossDomainMessage", async () => {
|
||||
it("should revert, when non-messenger call", async () => {
|
||||
await expect(queue.connect(signer).appendCrossDomainMessage(constants.AddressZero, 0, "0x")).to.revertedWith(
|
||||
await expect(queue.connect(signer).appendCrossDomainMessage(ZeroAddress, 0, "0x")).to.revertedWith(
|
||||
"Only callable by the L1ScrollMessenger"
|
||||
);
|
||||
});
|
||||
|
||||
it("should revert, when exceed maxGasLimit", async () => {
|
||||
await expect(
|
||||
queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 10000001, "0x")
|
||||
).to.revertedWith("Gas limit must not exceed maxGasLimit");
|
||||
await expect(queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 10000001, "0x")).to.revertedWith(
|
||||
"Gas limit must not exceed maxGasLimit"
|
||||
);
|
||||
});
|
||||
|
||||
it("should revert, when below intrinsic gas", async () => {
|
||||
await expect(queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 0, "0x")).to.revertedWith(
|
||||
await expect(queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 0, "0x")).to.revertedWith(
|
||||
"Insufficient gas limit, must be above intrinsic gas"
|
||||
);
|
||||
});
|
||||
|
||||
it("should succeed", async () => {
|
||||
expect(await queue.nextCrossDomainMessageIndex()).to.eq(constants.Zero);
|
||||
expect(await queue.nextCrossDomainMessageIndex()).to.eq(0n);
|
||||
const sender = getAddress(
|
||||
BigNumber.from(messenger.address)
|
||||
.add("0x1111000000000000000000000000000000001111")
|
||||
.mod(BigNumber.from(2).pow(160))
|
||||
.toHexString()
|
||||
toBeHex((toBigInt(messenger.address) + toBigInt("0x1111000000000000000000000000000000001111")) % 2n ** 160n)
|
||||
.slice(2)
|
||||
.padStart(40, "0")
|
||||
);
|
||||
@@ -190,7 +178,7 @@ describe("L1MessageQueue", async () => {
|
||||
await expect(queue.connect(messenger).appendCrossDomainMessage(signer.address, 100000, "0x01"))
|
||||
.to.emit(queue, "QueueTransaction")
|
||||
.withArgs(sender, signer.address, 0, 0, 100000, "0x01");
|
||||
expect(await queue.nextCrossDomainMessageIndex()).to.eq(constants.One);
|
||||
expect(await queue.nextCrossDomainMessageIndex()).to.eq(1n);
|
||||
expect(await queue.getCrossDomainMessage(0)).to.eq(hash);
|
||||
});
|
||||
});
|
||||
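// --- Editor's note (sketch, not part of the diff): the expected `sender` computed above is the
// standard L1-to-L2 address aliasing — add 0x1111000000000000000000000000000000001111 to the L1
// address modulo 2^160 — so a contract calling the messenger shows up on L2 under its alias.
import { getAddress, toBeHex, toBigInt } from "ethers";

const ALIAS_OFFSET = toBigInt("0x1111000000000000000000000000000000001111");

function applyL1ToL2Alias(l1Address: string): string {
  const aliased = (toBigInt(l1Address) + ALIAS_OFFSET) % 2n ** 160n;
  return getAddress(toBeHex(aliased, 20)); // pad back to 20 bytes before checksumming
}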
@@ -198,30 +186,30 @@ describe("L1MessageQueue", async () => {
|
||||
context("#appendEnforcedTransaction", async () => {
|
||||
it("should revert, when non-gateway call", async () => {
|
||||
await expect(
|
||||
queue.connect(signer).appendEnforcedTransaction(signer.address, constants.AddressZero, 0, 0, "0x")
|
||||
queue.connect(signer).appendEnforcedTransaction(signer.address, ZeroAddress, 0, 0, "0x")
|
||||
).to.revertedWith("Only callable by the EnforcedTxGateway");
|
||||
});
|
||||
|
||||
it("should revert, when sender is not EOA", async () => {
|
||||
await expect(
|
||||
queue.connect(gateway).appendEnforcedTransaction(queue.address, constants.AddressZero, 0, 0, "0x")
|
||||
queue.connect(gateway).appendEnforcedTransaction(queue.getAddress(), ZeroAddress, 0, 0, "0x")
|
||||
).to.revertedWith("only EOA");
|
||||
});
|
||||
|
||||
it("should revert, when exceed maxGasLimit", async () => {
|
||||
await expect(
|
||||
queue.connect(gateway).appendEnforcedTransaction(signer.address, constants.AddressZero, 0, 10000001, "0x")
|
||||
queue.connect(gateway).appendEnforcedTransaction(signer.address, ZeroAddress, 0, 10000001, "0x")
|
||||
).to.revertedWith("Gas limit must not exceed maxGasLimit");
|
||||
});
|
||||
|
||||
it("should revert, when below intrinsic gas", async () => {
|
||||
await expect(
|
||||
queue.connect(gateway).appendEnforcedTransaction(signer.address, constants.AddressZero, 0, 0, "0x")
|
||||
queue.connect(gateway).appendEnforcedTransaction(signer.address, ZeroAddress, 0, 0, "0x")
|
||||
).to.revertedWith("Insufficient gas limit, must be above intrinsic gas");
|
||||
});
|
||||
|
||||
it("should succeed", async () => {
|
||||
expect(await queue.nextCrossDomainMessageIndex()).to.eq(constants.Zero);
|
||||
expect(await queue.nextCrossDomainMessageIndex()).to.eq(0n);
|
||||
const sender = signer.address;
|
||||
const hash = await queue.computeTransactionHash(sender, 0, 200, signer.address, 100000, "0x01");
|
||||
await expect(
|
||||
@@ -229,7 +217,7 @@ describe("L1MessageQueue", async () => {
|
||||
)
|
||||
.to.emit(queue, "QueueTransaction")
|
||||
.withArgs(sender, signer.address, 200, 0, 100000, "0x01");
|
||||
expect(await queue.nextCrossDomainMessageIndex()).to.eq(constants.One);
|
||||
expect(await queue.nextCrossDomainMessageIndex()).to.eq(1n);
|
||||
expect(await queue.getCrossDomainMessage(0)).to.eq(hash);
|
||||
});
|
||||
});
|
||||
@@ -254,7 +242,7 @@ describe("L1MessageQueue", async () => {
it("should succeed", async () => {
// append 512 messages
for (let i = 0; i < 256 * 2; i++) {
await queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 1000000, "0x");
await queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 1000000, "0x");
}

// pop 50 messages with no skip
@@ -292,17 +280,12 @@ describe("L1MessageQueue", async () => {
}

// pop 256 messages with random skip
const bitmap = BigNumber.from("0x496525059c3f33758d17030403e45afe067b8a0ae1317cda0487fd2932cbea1a");
const bitmap = toBigInt("0x496525059c3f33758d17030403e45afe067b8a0ae1317cda0487fd2932cbea1a");
const tx = await queue.connect(scrollChain).popCrossDomainMessage(80, 256, bitmap);
await expect(tx).to.emit(queue, "DequeueTransaction").withArgs(80, 256, bitmap);
console.log("gas used:", (await tx.wait()).gasUsed.toString());
console.log("gas used:", (await tx.wait())!.gasUsed.toString());
for (let i = 80; i < 80 + 256; i++) {
expect(await queue.isMessageSkipped(i)).to.eq(
bitmap
.shr(i - 80)
.and(1)
.eq(1)
);
expect(await queue.isMessageSkipped(i)).to.eq(((bitmap >> toBigInt(i - 80)) & 1n) === 1n);
expect(await queue.isMessageDropped(i)).to.eq(false);
}
});
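The skip bitmap packs one flag per popped message into a single uint256: bit `i - startIndex` is set when message `i` was skipped. With ethers v6 the check is plain native bigint arithmetic, as the rewritten assertion above shows; a small helper for the same test (the name `isSkippedInBitmap` is ours):

// Returns true when the bit for `index` (relative to `startIndex`) is set in `bitmap`.
function isSkippedInBitmap(bitmap: bigint, startIndex: number, index: number): boolean {
  return ((bitmap >> BigInt(index - startIndex)) & 1n) === 1n;
}

// The loop above then reads: expect(await queue.isMessageSkipped(i)).to.eq(isSkippedInBitmap(bitmap, 80, i));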
@@ -314,39 +297,39 @@ describe("L1MessageQueue", async () => {
|
||||
it.skip(`should succeed on random tests, pop three times each with ${count1} ${count2} ${count3} msgs`, async () => {
|
||||
// append count1 + count2 + count3 messages
|
||||
for (let i = 0; i < count1 + count2 + count3; i++) {
|
||||
await queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 1000000, "0x");
|
||||
await queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 1000000, "0x");
|
||||
}
|
||||
|
||||
// first pop `count1` messages
|
||||
const bitmap1 = BigNumber.from(randomBytes(32));
|
||||
const bitmap1 = toBigInt(randomBytes(32));
|
||||
let tx = await queue.connect(scrollChain).popCrossDomainMessage(0, count1, bitmap1);
|
||||
await expect(tx)
|
||||
.to.emit(queue, "DequeueTransaction")
|
||||
.withArgs(0, count1, bitmap1.and(constants.One.shl(count1).sub(1)));
|
||||
.withArgs(0, count1, bitmap1 & ((1n << toBigInt(count1)) - 1n));
|
||||
for (let i = 0; i < count1; i++) {
|
||||
expect(await queue.isMessageSkipped(i)).to.eq(bitmap1.shr(i).and(1).eq(1));
|
||||
expect(await queue.isMessageSkipped(i)).to.eq(((bitmap1 >> toBigInt(i)) & 1n) === 1n);
|
||||
expect(await queue.isMessageDropped(i)).to.eq(false);
|
||||
}
|
||||
|
||||
// then pop `count2` messages
|
||||
const bitmap2 = BigNumber.from(randomBytes(32));
|
||||
const bitmap2 = toBigInt(randomBytes(32));
|
||||
tx = await queue.connect(scrollChain).popCrossDomainMessage(count1, count2, bitmap2);
|
||||
await expect(tx)
|
||||
.to.emit(queue, "DequeueTransaction")
|
||||
.withArgs(count1, count2, bitmap2.and(constants.One.shl(count2).sub(1)));
|
||||
.withArgs(count1, count2, bitmap2 & ((1n << toBigInt(count2)) - 1n));
|
||||
for (let i = 0; i < count2; i++) {
|
||||
expect(await queue.isMessageSkipped(i + count1)).to.eq(bitmap2.shr(i).and(1).eq(1));
|
||||
expect(await queue.isMessageSkipped(i + count1)).to.eq(((bitmap2 >> toBigInt(i)) & 1n) === 1n);
|
||||
expect(await queue.isMessageDropped(i + count1)).to.eq(false);
|
||||
}
|
||||
|
||||
// last pop `count3` messages
|
||||
const bitmap3 = BigNumber.from(randomBytes(32));
|
||||
const bitmap3 = toBigInt(randomBytes(32));
|
||||
tx = await queue.connect(scrollChain).popCrossDomainMessage(count1 + count2, count3, bitmap3);
|
||||
await expect(tx)
|
||||
.to.emit(queue, "DequeueTransaction")
|
||||
.withArgs(count1 + count2, count3, bitmap3.and(constants.One.shl(count3).sub(1)));
|
||||
.withArgs(count1 + count2, count3, bitmap3 & ((1n << toBigInt(count3)) - 1n));
|
||||
for (let i = 0; i < count3; i++) {
|
||||
expect(await queue.isMessageSkipped(i + count1 + count2)).to.eq(bitmap3.shr(i).and(1).eq(1));
|
||||
expect(await queue.isMessageSkipped(i + count1 + count2)).to.eq(((bitmap3 >> toBigInt(i)) & 1n) === 1n);
|
||||
expect(await queue.isMessageDropped(i + count1 + count2)).to.eq(false);
|
||||
}
|
||||
});
|
||||
@@ -365,7 +348,7 @@ describe("L1MessageQueue", async () => {
|
||||
it("should revert, when drop non-skipped message", async () => {
|
||||
// append 10 messages
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 1000000, "0x");
|
||||
await queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 1000000, "0x");
|
||||
}
|
||||
// pop 5 messages with no skip
|
||||
await expect(queue.connect(scrollChain).popCrossDomainMessage(0, 5, 0))
|
||||
@@ -390,7 +373,7 @@ describe("L1MessageQueue", async () => {
|
||||
it("should succeed", async () => {
|
||||
// append 10 messages
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 1000000, "0x");
|
||||
await queue.connect(messenger).appendCrossDomainMessage(ZeroAddress, 1000000, "0x");
|
||||
}
|
||||
// pop 10 messages, all skipped
|
||||
await expect(queue.connect(scrollChain).popCrossDomainMessage(0, 10, 0x3ff))
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
/* eslint-disable node/no-unpublished-import */
|
||||
/* eslint-disable node/no-missing-import */
|
||||
import { expect } from "chai";
|
||||
import { concat } from "ethers/lib/utils";
|
||||
import { concat } from "ethers";
|
||||
import { ethers } from "hardhat";
|
||||
|
||||
import { MockPatriciaMerkleTrieVerifier } from "../typechain";
|
||||
|
||||
interface ITestConfig {
|
||||
@@ -121,7 +122,6 @@ describe("PatriciaMerkleTrieVerifier", async () => {
|
||||
|
||||
const MockPatriciaMerkleTrieVerifier = await ethers.getContractFactory("MockPatriciaMerkleTrieVerifier", deployer);
|
||||
verifier = await MockPatriciaMerkleTrieVerifier.deploy();
|
||||
await verifier.deployed();
|
||||
});
|
||||
|
||||
for (const test of testcases) {
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
/* eslint-disable node/no-unpublished-import */
|
||||
import { expect } from "chai";
|
||||
import { randomBytes } from "crypto";
|
||||
import { BigNumber, Contract } from "ethers";
|
||||
import { ethers } from "hardhat";
|
||||
import { Contract, toBigInt } from "ethers";
|
||||
import fs from "fs";
|
||||
import { ethers } from "hardhat";
|
||||
|
||||
import PoseidonWithoutDomain from "circomlib/src/poseidon_gencontract";
|
||||
import { generateABI, createCode } from "../scripts/poseidon";
|
||||
@@ -23,12 +23,10 @@ describe("PoseidonHash.spec", async () => {
|
||||
PoseidonWithoutDomain.createCode(2),
|
||||
deployer
|
||||
);
|
||||
poseidonCircom = await PoseidonWithoutDomainFactory.deploy();
|
||||
await poseidonCircom.deployed();
|
||||
poseidonCircom = (await PoseidonWithoutDomainFactory.deploy()) as Contract;
|
||||
|
||||
const PoseidonWithDomainFactory = new ethers.ContractFactory(generateABI(2), createCode(2), deployer);
|
||||
poseidon = await PoseidonWithDomainFactory.deploy();
|
||||
await poseidon.deployed();
|
||||
poseidon = (await PoseidonWithDomainFactory.deploy()) as Contract;
|
||||
});
|
||||
|
||||
it("should succeed on zero inputs", async () => {
|
||||
@@ -40,8 +38,8 @@ describe("PoseidonHash.spec", async () => {
|
||||
it("should succeed on random inputs", async () => {
|
||||
for (let bytes = 1; bytes <= 32; ++bytes) {
|
||||
for (let i = 0; i < 5; ++i) {
|
||||
const a = randomBytes(bytes);
|
||||
const b = randomBytes(bytes);
|
||||
const a = toBigInt(randomBytes(bytes));
|
||||
const b = toBigInt(randomBytes(bytes));
|
||||
expect(await poseidonCircom["poseidon(uint256[2])"]([a, b])).to.eq(
|
||||
await poseidon["poseidon(uint256[2],uint256)"]([a, b], 0)
|
||||
);
|
||||
@@ -58,31 +56,20 @@ describe("PoseidonHash.spec", async () => {
|
||||
|
||||
// test against scroll's go implementation.
|
||||
context("domain = nonzero", async () => {
|
||||
let poseidonCircom: Contract;
|
||||
let poseidon: Contract;
|
||||
|
||||
beforeEach(async () => {
|
||||
const [deployer] = await ethers.getSigners();
|
||||
|
||||
const PoseidonWithoutDomainFactory = new ethers.ContractFactory(
|
||||
PoseidonWithoutDomain.generateABI(2),
|
||||
PoseidonWithoutDomain.createCode(2),
|
||||
deployer
|
||||
);
|
||||
poseidonCircom = await PoseidonWithoutDomainFactory.deploy();
|
||||
await poseidonCircom.deployed();
|
||||
|
||||
const PoseidonWithDomainFactory = new ethers.ContractFactory(generateABI(2), createCode(2), deployer);
|
||||
poseidon = await PoseidonWithDomainFactory.deploy();
|
||||
await poseidon.deployed();
|
||||
poseidon = (await PoseidonWithDomainFactory.deploy()) as Contract;
|
||||
});
|
||||
|
||||
it("should succeed on zero inputs", async () => {
|
||||
expect(await poseidon["poseidon(uint256[2],uint256)"]([0, 0], 6)).to.eq(
|
||||
BigNumber.from("17848312925884193353134534408113064827548730776291701343555436351962284922129")
|
||||
toBigInt("17848312925884193353134534408113064827548730776291701343555436351962284922129")
|
||||
);
|
||||
expect(await poseidon["poseidon(uint256[2],uint256)"]([0, 0], 7)).to.eq(
|
||||
BigNumber.from("20994231331856095272861976502721128670019193481895476667943874333621461724676")
|
||||
toBigInt("20994231331856095272861976502721128670019193481895476667943874333621461724676")
|
||||
);
|
||||
});
|
||||
|
||||
@@ -90,7 +77,7 @@ describe("PoseidonHash.spec", async () => {
|
||||
const lines = String(fs.readFileSync("./integration-test/testdata/poseidon_hash_with_domain.data")).split("\n");
|
||||
for (const line of lines) {
|
||||
const [domain, a, b, hash] = line.split(" ");
|
||||
expect(await poseidon["poseidon(uint256[2],uint256)"]([a, b], domain)).to.eq(BigNumber.from(hash));
|
||||
expect(await poseidon["poseidon(uint256[2],uint256)"]([a, b], domain)).to.eq(toBigInt(hash));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
contracts/integration-test/ScrollChain.blob.spec.ts (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
/* eslint-disable node/no-unpublished-import */
|
||||
/* eslint-disable node/no-missing-import */
|
||||
import { ZeroAddress } from "ethers";
|
||||
import { ethers } from "hardhat";
|
||||
|
||||
import { ScrollChain, L1MessageQueue } from "../typechain";
|
||||
import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers";
|
||||
import { randomBytes } from "crypto";
|
||||
import { expect } from "chai";
|
||||
|
||||
describe("ScrollChain.blob", async () => {
|
||||
let deployer: HardhatEthersSigner;
|
||||
let signer: HardhatEthersSigner;
|
||||
|
||||
let queue: L1MessageQueue;
|
||||
let chain: ScrollChain;
|
||||
|
||||
beforeEach(async () => {
|
||||
[deployer, signer] = await ethers.getSigners();
|
||||
|
||||
const EmptyContract = await ethers.getContractFactory("EmptyContract", deployer);
|
||||
const empty = await EmptyContract.deploy();
|
||||
|
||||
const ProxyAdmin = await ethers.getContractFactory("ProxyAdmin", deployer);
|
||||
const admin = await ProxyAdmin.deploy();
|
||||
|
||||
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
|
||||
const queueProxy = await TransparentUpgradeableProxy.deploy(empty.getAddress(), admin.getAddress(), "0x");
|
||||
const chainProxy = await TransparentUpgradeableProxy.deploy(empty.getAddress(), admin.getAddress(), "0x");
|
||||
|
||||
const L1MessageQueue = await ethers.getContractFactory("L1MessageQueue", deployer);
|
||||
const queueImpl = await L1MessageQueue.deploy(deployer.address, chainProxy.getAddress(), deployer.address);
|
||||
await admin.upgrade(queueProxy.getAddress(), queueImpl.getAddress());
|
||||
|
||||
const ScrollChain = await ethers.getContractFactory("ScrollChain", deployer);
|
||||
const chainImpl = await ScrollChain.deploy(0, queueProxy.getAddress(), deployer.address);
|
||||
await admin.upgrade(chainProxy.getAddress(), chainImpl.getAddress());
|
||||
|
||||
queue = await ethers.getContractAt("L1MessageQueue", await queueProxy.getAddress(), deployer);
|
||||
chain = await ethers.getContractAt("ScrollChain", await chainProxy.getAddress(), deployer);
|
||||
|
||||
await chain.initialize(queue.getAddress(), ZeroAddress, 100);
|
||||
await chain.addSequencer(deployer.address);
|
||||
await chain.addProver(deployer.address);
|
||||
await queue.initialize(deployer.address, chain.getAddress(), deployer.address, deployer.address, 10000000);
|
||||
});
|
||||
|
||||
context("commit batch", async () => {
|
||||
let batchHeader0: Uint8Array;
|
||||
|
||||
beforeEach(async () => {
|
||||
// import 10 L1 messages
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await queue.appendCrossDomainMessage(deployer.address, 1000000, "0x");
|
||||
}
|
||||
|
||||
// import genesis batch first
|
||||
batchHeader0 = new Uint8Array(89);
|
||||
batchHeader0[25] = 1;
|
||||
await chain.importGenesisBatch(batchHeader0, randomBytes(32));
|
||||
});
|
||||
|
||||
it("should revert when caller is not sequencer", async () => {
|
||||
await expect(chain.connect(signer).commitBatch(1, batchHeader0, [], "0x")).to.revertedWithCustomError(
|
||||
chain,
|
||||
"ErrorCallerIsNotSequencer"
|
||||
);
|
||||
});
|
||||
|
||||
it("should revert when batch is empty", async () => {
|
||||
await expect(chain.commitBatch(1, batchHeader0, [], "0x")).to.revertedWithCustomError(chain, "ErrorBatchIsEmpty");
|
||||
});
|
||||
|
||||
it("should revert when batch header length too small", async () => {
|
||||
const header = new Uint8Array(120);
|
||||
header[0] = 1;
|
||||
await expect(chain.commitBatch(1, header, ["0x"], "0x")).to.revertedWithCustomError(
|
||||
chain,
|
||||
"ErrorBatchHeaderLengthTooSmall"
|
||||
);
|
||||
});
|
||||
|
||||
it("should revert when wrong bitmap length", async () => {
|
||||
const header = new Uint8Array(122);
|
||||
header[0] = 1;
|
||||
await expect(chain.commitBatch(1, header, ["0x"], "0x")).to.revertedWithCustomError(
|
||||
chain,
|
||||
"ErrorIncorrectBitmapLength"
|
||||
);
|
||||
});
|
||||
|
||||
it("should revert when incorrect parent batch hash", async () => {
|
||||
batchHeader0[25] = 2;
|
||||
await expect(chain.commitBatch(1, batchHeader0, ["0x"], "0x")).to.revertedWithCustomError(
|
||||
chain,
|
||||
"ErrorIncorrectBatchHash"
|
||||
);
|
||||
batchHeader0[25] = 1;
|
||||
});
|
||||
|
||||
it("should revert when ErrorInvalidBatchHeaderVersion", async () => {
|
||||
const header = new Uint8Array(121);
|
||||
header[0] = 2;
|
||||
await expect(chain.commitBatch(1, header, ["0x"], "0x")).to.revertedWithCustomError(
|
||||
chain,
|
||||
"ErrorInvalidBatchHeaderVersion"
|
||||
);
|
||||
await expect(chain.commitBatch(2, batchHeader0, ["0x"], "0x")).to.revertedWithCustomError(
|
||||
chain,
|
||||
"ErrorInvalidBatchHeaderVersion"
|
||||
);
|
||||
});
|
||||
|
||||
it("should revert when ErrorNoBlobFound", async () => {
|
||||
await expect(chain.commitBatch(1, batchHeader0, ["0x"], "0x")).to.revertedWithCustomError(
|
||||
chain,
|
||||
"ErrorNoBlobFound"
|
||||
);
|
||||
});
|
||||
|
||||
/* Hardhat doesn't have support for EIP4844 yet.
|
||||
const makeTransaction = async (data: string, value: bigint, blobVersionedHashes: Array<string>) => {
|
||||
const tx = new Transaction();
|
||||
tx.type = 3;
|
||||
tx.to = await chain.getAddress();
|
||||
tx.data = data;
|
||||
tx.nonce = await deployer.getNonce();
|
||||
tx.gasLimit = 1000000;
|
||||
tx.maxPriorityFeePerGas = (await ethers.provider.getFeeData()).maxPriorityFeePerGas;
|
||||
tx.maxFeePerGas = (await ethers.provider.getFeeData()).maxFeePerGas;
|
||||
tx.value = value;
|
||||
tx.chainId = (await ethers.provider.getNetwork()).chainId;
|
||||
tx.maxFeePerBlobGas = ethers.parseUnits("1", "gwei");
|
||||
tx.blobVersionedHashes = blobVersionedHashes;
|
||||
return tx;
|
||||
};
|
||||
|
||||
it("should revert when ErrorFoundMultipleBlob", async () => {
|
||||
const data = chain.interface.encodeFunctionData("commitBatch", [1, batchHeader0, ["0x"], "0x"]);
|
||||
const tx = await makeTransaction(data, 0n, [ZeroHash, ZeroHash]);
|
||||
const signature = await deployer.signMessage(tx.unsignedHash);
|
||||
tx.signature = Signature.from(signature);
|
||||
const r = await ethers.provider.broadcastTransaction(tx.serialized);
|
||||
await expect(r).to.revertedWithCustomError(chain, "ErrorFoundMultipleBlob");
|
||||
});
|
||||
|
||||
it("should revert when ErrorNoBlockInChunk", async () => {});
|
||||
|
||||
it("should revert when ErrorIncorrectChunkLength", async () => {});
|
||||
|
||||
it("should revert when ErrorLastL1MessageSkipped", async () => {});
|
||||
|
||||
it("should revert when ErrorNumTxsLessThanNumL1Msgs", async () => {});
|
||||
|
||||
it("should revert when ErrorTooManyTxsInOneChunk", async () => {});
|
||||
|
||||
it("should revert when ErrorIncorrectBitmapLength", async () => {});
|
||||
|
||||
it("should succeed", async () => {});
|
||||
*/
|
||||
});
|
||||
});
|
||||
@@ -1,8 +1,8 @@
|
||||
/* eslint-disable node/no-unpublished-import */
|
||||
/* eslint-disable node/no-missing-import */
|
||||
import { concat } from "ethers/lib/utils";
|
||||
import { constants } from "ethers";
|
||||
import { ZeroAddress, concat, getBytes } from "ethers";
|
||||
import { ethers } from "hardhat";
|
||||
|
||||
import { ScrollChain, L1MessageQueue } from "../typechain";
|
||||
|
||||
describe("ScrollChain", async () => {
|
||||
@@ -14,40 +14,28 @@ describe("ScrollChain", async () => {
|
||||
|
||||
const EmptyContract = await ethers.getContractFactory("EmptyContract", deployer);
|
||||
const empty = await EmptyContract.deploy();
|
||||
await empty.deployed();
|
||||
|
||||
const ProxyAdmin = await ethers.getContractFactory("ProxyAdmin", deployer);
|
||||
const admin = await ProxyAdmin.deploy();
|
||||
await admin.deployed();
|
||||
|
||||
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
|
||||
const queueProxy = await TransparentUpgradeableProxy.deploy(empty.address, admin.address, "0x");
|
||||
await queueProxy.deployed();
|
||||
const chainProxy = await TransparentUpgradeableProxy.deploy(empty.address, admin.address, "0x");
|
||||
await chainProxy.deployed();
|
||||
const queueProxy = await TransparentUpgradeableProxy.deploy(empty.getAddress(), admin.getAddress(), "0x");
|
||||
const chainProxy = await TransparentUpgradeableProxy.deploy(empty.getAddress(), admin.getAddress(), "0x");
|
||||
|
||||
const L1MessageQueue = await ethers.getContractFactory("L1MessageQueue", deployer);
|
||||
const queueImpl = await L1MessageQueue.deploy(constants.AddressZero, chainProxy.address, deployer.address);
|
||||
await queueImpl.deployed();
|
||||
await admin.upgrade(queueProxy.address, queueImpl.address);
|
||||
const queueImpl = await L1MessageQueue.deploy(ZeroAddress, chainProxy.getAddress(), deployer.address);
|
||||
await admin.upgrade(queueProxy.getAddress(), queueImpl.getAddress());
|
||||
|
||||
const ScrollChain = await ethers.getContractFactory("ScrollChain", deployer);
|
||||
const chainImpl = await ScrollChain.deploy(0, queueProxy.address, deployer.address);
|
||||
await chainImpl.deployed();
|
||||
await admin.upgrade(chainProxy.address, chainImpl.address);
|
||||
const chainImpl = await ScrollChain.deploy(0, queueProxy.getAddress(), deployer.address);
|
||||
await admin.upgrade(chainProxy.getAddress(), chainImpl.getAddress());
|
||||
|
||||
queue = await ethers.getContractAt("L1MessageQueue", queueProxy.address, deployer);
|
||||
chain = await ethers.getContractAt("ScrollChain", chainProxy.address, deployer);
|
||||
queue = await ethers.getContractAt("L1MessageQueue", await queueProxy.getAddress(), deployer);
|
||||
chain = await ethers.getContractAt("ScrollChain", await chainProxy.getAddress(), deployer);
|
||||
|
||||
await chain.initialize(queue.address, constants.AddressZero, 100);
|
||||
await chain.initialize(queue.getAddress(), ZeroAddress, 100);
|
||||
await chain.addSequencer(deployer.address);
|
||||
await queue.initialize(
|
||||
constants.AddressZero,
|
||||
chain.address,
|
||||
constants.AddressZero,
|
||||
constants.AddressZero,
|
||||
10000000
|
||||
);
|
||||
await queue.initialize(ZeroAddress, chain.getAddress(), ZeroAddress, ZeroAddress, 10000000);
|
||||
});
|
||||
|
||||
// @note skip this benchmark tests
|
||||
@@ -82,12 +70,12 @@ describe("ScrollChain", async () => {
|
||||
for (let i = 0; i < numChunks; i++) {
|
||||
const txsInChunk: Array<Uint8Array> = [];
|
||||
for (let j = 0; j < numBlocks; j++) {
|
||||
txsInChunk.push(concat(txs));
|
||||
txsInChunk.push(getBytes(concat(txs)));
|
||||
}
|
||||
chunks.push(concat([chunk, concat(txsInChunk)]));
|
||||
chunks.push(getBytes(concat([chunk, concat(txsInChunk)])));
|
||||
}
|
||||
|
||||
const estimateGas = await chain.estimateGas.commitBatch(0, batchHeader0, chunks, "0x");
|
||||
const estimateGas = await chain.commitBatch.estimateGas(0, batchHeader0, chunks, "0x");
|
||||
console.log(
|
||||
`${numChunks}`,
|
||||
`${numBlocks}`,
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
/* eslint-disable node/no-unpublished-import */
|
||||
/* eslint-disable node/no-missing-import */
|
||||
import { HardhatEthersSigner } from "@nomicfoundation/hardhat-ethers/signers";
|
||||
import { expect } from "chai";
|
||||
import { hexlify } from "ethers/lib/utils";
|
||||
import { ethers } from "hardhat";
|
||||
import { ZkEvmVerifierV1 } from "../typechain";
|
||||
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
|
||||
import { hexlify } from "ethers";
|
||||
import fs from "fs";
|
||||
import { ethers } from "hardhat";
|
||||
|
||||
import { ZkEvmVerifierV1 } from "../typechain";
|
||||
|
||||
describe("ZkEvmVerifierV1", async () => {
|
||||
let deployer: SignerWithAddress;
|
||||
let deployer: HardhatEthersSigner;
|
||||
|
||||
let zkEvmVerifier: ZkEvmVerifierV1;
|
||||
|
||||
@@ -20,8 +21,7 @@ describe("ZkEvmVerifierV1", async () => {
|
||||
const receipt = await tx.wait();
|
||||
|
||||
const ZkEvmVerifierV1 = await ethers.getContractFactory("ZkEvmVerifierV1", deployer);
|
||||
zkEvmVerifier = await ZkEvmVerifierV1.deploy(receipt.contractAddress);
|
||||
await zkEvmVerifier.deployed();
|
||||
zkEvmVerifier = await ZkEvmVerifierV1.deploy(receipt!.contractAddress!);
|
||||
});
|
||||
|
||||
it("should succeed", async () => {
|
||||
@@ -37,7 +37,7 @@ describe("ZkEvmVerifierV1", async () => {
|
||||
|
||||
// verify ok
|
||||
await zkEvmVerifier.verify(proof, publicInputHash);
|
||||
console.log("Gas Usage:", (await zkEvmVerifier.estimateGas.verify(proof, publicInputHash)).toString());
|
||||
console.log("Gas Usage:", (await zkEvmVerifier.verify.estimateGas(proof, publicInputHash)).toString());
|
||||
|
||||
// verify failed
|
||||
await expect(zkEvmVerifier.verify(proof, publicInputHash.reverse())).to.reverted;
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/* eslint-disable node/no-unpublished-import */
|
||||
/* eslint-disable node/no-missing-import */
|
||||
import { expect } from "chai";
|
||||
import { concat } from "ethers/lib/utils";
|
||||
import { ethers } from "hardhat";
|
||||
import { MockZkTrieVerifier } from "../typechain";
|
||||
|
||||
import { generateABI, createCode } from "../scripts/poseidon";
|
||||
import { MockZkTrieVerifier } from "../typechain";
|
||||
import { concat } from "ethers";
|
||||
|
||||
const chars = "0123456789abcdef";
|
||||
|
||||
@@ -273,13 +273,10 @@ describe("ZkTrieVerifier", async () => {
|
||||
const [deployer] = await ethers.getSigners();
|
||||
|
||||
const PoseidonHashWithDomainFactory = new ethers.ContractFactory(generateABI(2), createCode(2), deployer);
|
||||
|
||||
const poseidon = await PoseidonHashWithDomainFactory.deploy();
|
||||
await poseidon.deployed();
|
||||
|
||||
const MockZkTrieVerifier = await ethers.getContractFactory("MockZkTrieVerifier", deployer);
|
||||
verifier = await MockZkTrieVerifier.deploy(poseidon.address);
|
||||
await verifier.deployed();
|
||||
verifier = await MockZkTrieVerifier.deploy(poseidon.getAddress());
|
||||
});
|
||||
|
||||
const shouldRevert = async (test: ITestConfig, reason: string, extra?: string) => {
|
||||
@@ -456,7 +453,7 @@ describe("ZkTrieVerifier", async () => {
|
||||
it("should revert, when InvalidAccountKeyPreimage", async () => {
|
||||
const test = testcases[0];
|
||||
const index = test.accountProof.length - 2;
|
||||
const correct = test.accountProof[index];
|
||||
const correct = test.accountProof[index].slice();
|
||||
for (const p of [398, 438]) {
|
||||
const v = correct[p];
|
||||
for (let b = 0; b < 3; ++b) {
|
||||
@@ -471,7 +468,7 @@ describe("ZkTrieVerifier", async () => {
|
||||
it("should revert, when InvalidProofMagicBytes", async () => {
|
||||
const test = testcases[0];
|
||||
let index = test.accountProof.length - 1;
|
||||
let correct = test.accountProof[index];
|
||||
let correct = test.accountProof[index].slice();
|
||||
for (const p of [2, 32, 91]) {
|
||||
const v = correct[p];
|
||||
for (let b = 0; b < 3; ++b) {
|
||||
@@ -483,7 +480,7 @@ describe("ZkTrieVerifier", async () => {
|
||||
}
|
||||
|
||||
index = test.storageProof.length - 1;
|
||||
correct = test.storageProof[index];
|
||||
correct = test.storageProof[index].slice();
|
||||
for (const p of [2, 32, 91]) {
|
||||
const v = correct[p];
|
||||
for (let b = 0; b < 3; ++b) {
|
||||
@@ -497,13 +494,14 @@ describe("ZkTrieVerifier", async () => {
|
||||
|
||||
it("should revert, when InvalidAccountLeafNodeHash", async () => {
|
||||
const test = testcases[0];
|
||||
const correct = test.storageProof.slice();
|
||||
test.storageProof = [
|
||||
"0x05",
|
||||
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449",
|
||||
];
|
||||
const correct = test.accountProof[test.accountProof.length - 2];
|
||||
// change nonce
|
||||
test.accountProof[test.accountProof.length - 2] = correct.replace(
|
||||
"0x0420e9fb498ff9c35246d527da24aa1710d2cc9b055ecf9a95a8a2a11d3d836cdf050800000",
|
||||
"0x0420e9fb498ff9c35246d527da24aa1710d2cc9b055ecf9a95a8a2a11d3d836cdf050800001"
|
||||
);
|
||||
await shouldRevert(test, "InvalidAccountLeafNodeHash");
|
||||
test.storageProof = correct;
|
||||
test.accountProof[test.accountProof.length - 2] = correct;
|
||||
});
|
||||
|
||||
it("should revert, when InvalidStorageLeafNodeType", async () => {
|
||||
|
||||
@@ -16,44 +16,47 @@
"prepare": "cd .. && husky install contracts/.husky"
},
"devDependencies": {
"@nomiclabs/hardhat-ethers": "^2.0.0",
"@nomiclabs/hardhat-etherscan": "^3.0.0",
"@nomiclabs/hardhat-waffle": "^2.0.0",
"@nomicfoundation/hardhat-chai-matchers": "^2.0.6",
"@nomicfoundation/hardhat-ethers": "^3.0.5",
"@nomicfoundation/hardhat-verify": "^2.0.5",
"@primitivefi/hardhat-dodoc": "^0.2.3",
"@typechain/ethers-v5": "^7.0.1",
"@typechain/hardhat": "^2.3.0",
"@typechain/ethers-v6": "^0.5.1",
"@typechain/hardhat": "^9.1.0",
"@types/chai": "^4.2.21",
"@types/edit-json-file": "^1.7.0",
"@types/mocha": "^9.0.0",
"@types/node": "^12.0.0",
"@typescript-eslint/eslint-plugin": "^4.29.1",
"@typescript-eslint/parser": "^4.29.1",
"@types/node": "^20.11.27",
"@typescript-eslint/eslint-plugin": "^7.2.0",
"@typescript-eslint/parser": "^7.2.0",
"chai": "^4.2.0",
"circom": "^0.5.46",
"circomlib": "^0.5.0",
"dotenv": "^10.0.0",
"edit-json-file": "^1.7.0",
"eslint": "^7.29.0",
"eslint": "^8.57.0",
"eslint-config-prettier": "^8.3.0",
"eslint-config-standard": "^16.0.3",
"eslint-config-standard": "^17.1.0",
"eslint-plugin-import": "^2.23.4",
"eslint-plugin-n": "^16.6.2",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^3.4.0",
"eslint-plugin-promise": "^5.1.0",
"eslint-plugin-promise": "^6.1.1",
"ethereum-waffle": "^3.0.0",
"ethers": "^5.0.0",
"hardhat": "^2.9.3",
"ethers": "^6.11.1",
"hardhat": "^2.22.0",
"hardhat-gas-reporter": "^1.0.4",
"husky": "^8.0.1",
"lint-staged": "^13.0.3",
"lodash": "^4.17.21",
"prettier": "^2.3.2",
"prettier-plugin-solidity": "^1.0.0-beta.13",
"solhint": "^3.3.6",
"solidity-coverage": "^0.7.16",
"solidity-coverage": "^0.8.11",
"squirrelly": "8.0.8",
"toml": "^3.0.0",
"ts-node": "^10.1.0",
"typechain": "^5.1.2",
"typescript": "^4.5.2"
"typechain": "^8.3.2",
"typescript": "^5.4.2"
},
"dependencies": {
"@openzeppelin/contracts": "^v4.9.3",
@@ -63,5 +66,8 @@
"*.{js,ts}": "npx eslint --cache --fix",
"!(docs/apis/*).md": "prettier --ignore-unknown --write",
"*.sol": "prettier --ignore-unknown --write"
},
"engines": {
"node": ">=10.4.0"
}
}
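The devDependency bump from ethers ^5 to ^6.11.1, together with the move from the @nomiclabs to the @nomicfoundation Hardhat plugins, is what drives most of the test rewrites in this diff. The recurring substitutions, summarized from the changes above rather than as new API claims:

// ethers v5                                  -> ethers v6
// constants.AddressZero                      -> ZeroAddress
// BigNumber.from(x).add(y).mod(m)            -> (toBigInt(x) + toBigInt(y)) % m   (native bigint)
// contract.address                           -> await contract.getAddress()
// await contract.deployed()                  -> dropped (v6: waitForDeployment(), usually unnecessary in these tests)
// contract.estimateGas.fn(args)              -> contract.fn.estimateGas(args)
// (await tx.wait()).gasUsed                  -> (await tx.wait())!.gasUsed   (wait() may resolve to null)
// SignerWithAddress (@nomiclabs)             -> HardhatEthersSigner (@nomicfoundation/hardhat-ethers)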
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
// solhint-disable no-console
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
// solhint-disable no-console
|
||||
|
||||
@@ -92,7 +92,11 @@ contract DeployL1BridgeContracts is Script {
|
||||
}
|
||||
|
||||
function deployMultipleVersionRollupVerifier() internal {
|
||||
rollupVerifier = new MultipleVersionRollupVerifier(address(zkEvmVerifierV1));
|
||||
uint256[] memory _versions = new uint256[](1);
|
||||
address[] memory _verifiers = new address[](1);
|
||||
_versions[0] = 0;
|
||||
_verifiers[0] = address(zkEvmVerifierV1);
|
||||
rollupVerifier = new MultipleVersionRollupVerifier(L1_SCROLL_CHAIN_PROXY_ADDR, _versions, _verifiers);
|
||||
|
||||
logAddress("L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR", address(rollupVerifier));
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
// solhint-disable no-console
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {Script} from "forge-std/Script.sol";
|
||||
import {console} from "forge-std/console.sol";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
// solhint-disable no-console
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
// solhint-disable no-console
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {Script} from "forge-std/Script.sol";
|
||||
import {console} from "forge-std/console.sol";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {Script} from "forge-std/Script.sol";
|
||||
import {console} from "forge-std/console.sol";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {Script} from "forge-std/Script.sol";
|
||||
import {console} from "forge-std/console.sol";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {Script} from "forge-std/Script.sol";
|
||||
|
||||
@@ -96,9 +96,6 @@ contract InitializeL1BridgeContracts is Script {
|
||||
ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).addSequencer(L1_COMMIT_SENDER_ADDRESS);
|
||||
ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).addProver(L1_FINALIZE_SENDER_ADDRESS);
|
||||
|
||||
// initialize MultipleVersionRollupVerifier
|
||||
MultipleVersionRollupVerifier(L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR).initialize(L1_SCROLL_CHAIN_PROXY_ADDR);
|
||||
|
||||
// initialize L2GasPriceOracle
|
||||
L2GasPriceOracle(L2_GAS_PRICE_ORACLE_PROXY_ADDR).initialize(
|
||||
21000, // _txGas
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {Script} from "forge-std/Script.sol";
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {Script} from "forge-std/Script.sol";
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: UNLICENSED
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {Script} from "forge-std/Script.sol";
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/* eslint-disable node/no-missing-import */
|
||||
import { ethers } from "ethers";
|
||||
import { ethers, keccak256 } from "ethers";
|
||||
|
||||
import Contract from "circomlib/src/evmasm";
|
||||
import * as constants from "circomlib/src/poseidon_constants";
|
||||
@@ -90,10 +90,10 @@ export function createCode(nInputs: number) {
|
||||
C.calldataload();
|
||||
C.div();
|
||||
C.dup(0);
|
||||
C.push(ethers.utils.keccak256(ethers.utils.toUtf8Bytes(`poseidon(uint256[${nInputs}],uint256)`)).slice(0, 10)); // poseidon(uint256[n],uint256)
|
||||
C.push(keccak256(ethers.toUtf8Bytes(`poseidon(uint256[${nInputs}],uint256)`)).slice(0, 10)); // poseidon(uint256[n],uint256)
|
||||
C.eq();
|
||||
C.swap(1);
|
||||
C.push(ethers.utils.keccak256(ethers.utils.toUtf8Bytes(`poseidon(bytes32[${nInputs}],bytes32)`)).slice(0, 10)); // poseidon(bytes32[n],bytes32)
|
||||
C.push(keccak256(ethers.toUtf8Bytes(`poseidon(bytes32[${nInputs}],bytes32)`)).slice(0, 10)); // poseidon(bytes32[n],bytes32)
|
||||
C.eq();
|
||||
C.or();
|
||||
C.jmpi("start");
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {TimelockController} from "@openzeppelin/contracts/governance/TimelockController.sol";
|
||||
import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
import {IScrollMessenger} from "../libraries/IScrollMessenger.sol";
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {IScrollChain} from "./rollup/IScrollChain.sol";
|
||||
import {IL1MessageQueue} from "./rollup/IL1MessageQueue.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
|
||||
import {ECDSAUpgradeable} from "@openzeppelin/contracts-upgradeable/utils/cryptography/ECDSAUpgradeable.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
/// @title The interface for the ERC1155 cross chain gateway on layer 1.
|
||||
interface IL1ERC1155Gateway {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
interface IL1ERC20Gateway {
|
||||
/**********
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
/// @title The interface for the ERC721 cross chain gateway on layer 1.
|
||||
interface IL1ERC721Gateway {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
interface IL1ETHGateway {
|
||||
/**********
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
import {IL1ETHGateway} from "./IL1ETHGateway.sol";
|
||||
import {IL1ERC20Gateway} from "./IL1ERC20Gateway.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {IERC20Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/IERC20Upgradeable.sol";
|
||||
import {SafeERC20Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/utils/SafeERC20Upgradeable.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {IERC1155Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC1155/IERC1155Upgradeable.sol";
|
||||
import {ERC1155HolderUpgradeable, ERC1155ReceiverUpgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC1155/utils/ERC1155HolderUpgradeable.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
import {IERC20Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/IERC20Upgradeable.sol";
|
||||
import {SafeERC20Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/utils/SafeERC20Upgradeable.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {IERC721Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC721/IERC721Upgradeable.sol";
|
||||
import {ERC721HolderUpgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC721/utils/ERC721HolderUpgradeable.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {IL2ETHGateway} from "../../L2/gateways/IL2ETHGateway.sol";
|
||||
import {IL1ScrollMessenger} from "../IL1ScrollMessenger.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
|
||||
import {IERC20Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/IERC20Upgradeable.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {ClonesUpgradeable} from "@openzeppelin/contracts-upgradeable/proxy/ClonesUpgradeable.sol";
|
||||
import {IERC20MetadataUpgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/extensions/IERC20MetadataUpgradeable.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {IWETH} from "../../interfaces/IWETH.sol";
|
||||
import {IL2ERC20Gateway} from "../../L2/gateways/IL2ERC20Gateway.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {IFiatToken} from "../../../interfaces/IFiatToken.sol";
|
||||
import {IUSDCBurnableSourceBridge} from "../../../interfaces/IUSDCBurnableSourceBridge.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
interface IL1MessageQueue {
|
||||
/**********
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
import {IL1MessageQueue} from "./IL1MessageQueue.sol";
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
interface IL2GasPriceOracle {
|
||||
/// @notice Return the latest known l2 base fee.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity ^0.8.16;
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
interface IScrollChain {
|
||||
/**********
|
||||
@@ -24,12 +24,20 @@ interface IScrollChain {
/// @param withdrawRoot The merkle root on layer2 after this batch.
event FinalizeBatch(uint256 indexed batchIndex, bytes32 indexed batchHash, bytes32 stateRoot, bytes32 withdrawRoot);

/**********
* Errors *
**********/
/// @notice Emitted when owner updates the status of sequencer.
/// @param account The address of account updated.
/// @param status The status of the account updated.
event UpdateSequencer(address indexed account, bool status);

/// @dev Thrown when the given address is `address(0)`.
error ErrorZeroAddress();
/// @notice Emitted when owner updates the status of prover.
/// @param account The address of account updated.
/// @param status The status of the account updated.
event UpdateProver(address indexed account, bool status);

/// @notice Emitted when the value of `maxNumTxInChunk` is updated.
/// @param oldMaxNumTxInChunk The old value of `maxNumTxInChunk`.
/// @param newMaxNumTxInChunk The new value of `maxNumTxInChunk`.
event UpdateMaxNumTxInChunk(uint256 oldMaxNumTxInChunk, uint256 newMaxNumTxInChunk);

/*************************
* Public View Functions *
@@ -90,4 +98,26 @@ interface IScrollChain {
bytes32 withdrawRoot,
bytes calldata aggrProof
) external;

/// @notice Finalize a committed batch (with blob) on layer 1.
///
/// @dev Memory layout of `blobDataProof`:
/// | z | y | kzg_commitment | kzg_proof |
/// |---------|---------|----------------|-----------|
/// | bytes32 | bytes32 | bytes48 | bytes48 |
///
/// @param batchHeader The header of current batch, see the encoding in comments of `commitBatch`.
/// @param prevStateRoot The state root of parent batch.
/// @param postStateRoot The state root of current batch.
/// @param withdrawRoot The withdraw trie root of current batch.
/// @param blobDataProof The proof for blob data.
/// @param aggrProof The aggregation proof for current batch.
function finalizeBatchWithProof4844(
bytes calldata batchHeader,
bytes32 prevStateRoot,
bytes32 postStateRoot,
bytes32 withdrawRoot,
bytes calldata blobDataProof,
bytes calldata aggrProof
) external;
}
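Per the memory layout documented in `finalizeBatchWithProof4844` above, `blobDataProof` is the fixed-size concatenation z (32 bytes) || y (32 bytes) || kzg_commitment (48 bytes) || kzg_proof (48 bytes), 160 bytes in total. A hedged TypeScript sketch of assembling it before calling the function (the values come from an external KZG prover; the helper name is ours):

import { concat, getBytes } from "ethers";

// z and y are 32-byte field elements; commitment and proof are 48-byte KZG points, all 0x-prefixed hex strings.
function encodeBlobDataProof(z: string, y: string, commitment: string, proof: string): Uint8Array {
  const packed = getBytes(concat([z, y, commitment, proof]));
  if (packed.length !== 160) throw new Error("blobDataProof must be exactly 160 bytes");
  return packed;
}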
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
|
||||
import {BitMapsUpgradeable} from "@openzeppelin/contracts-upgradeable/utils/structs/BitMapsUpgradeable.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {IWhitelist} from "../../libraries/common/IWhitelist.sol";
|
||||
import {IL1MessageQueue} from "./IL1MessageQueue.sol";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT

pragma solidity =0.8.16;
pragma solidity =0.8.24;

import {Ownable} from "@openzeppelin/contracts/access/Ownable.sol";

@@ -14,9 +14,30 @@ contract MultipleVersionRollupVerifier is IRollupVerifier, Ownable {
**********/

/// @notice Emitted when the address of verifier is updated.
/// @param version The version of the verifier.
/// @param startBatchIndex The start batch index when the verifier will be used.
/// @param verifier The address of new verifier.
event UpdateVerifier(uint256 startBatchIndex, address verifier);
event UpdateVerifier(uint256 version, uint256 startBatchIndex, address verifier);

/**********
* Errors *
**********/

/// @dev Thrown when the given address is `address(0)`.
error ErrorZeroAddress();

/// @dev Thrown when the given start batch index is finalized.
error ErrorStartBatchIndexFinalized();

/// @dev Thrown when the given start batch index is smaller than `latestVerifier.startBatchIndex`.
error ErrorStartBatchIndexTooSmall();

/*************
* Constants *
*************/

/// @notice The address of ScrollChain contract.
address immutable scrollChain;

/***********
* Structs *
@@ -33,29 +54,31 @@ contract MultipleVersionRollupVerifier is IRollupVerifier, Ownable {
* Variables *
*************/

/// @notice The list of legacy zkevm verifier, sorted by batchIndex in increasing order.
Verifier[] public legacyVerifiers;
/// @notice Mapping from verifier version to the list of legacy zkevm verifiers.
/// The verifiers are sorted by batchIndex in increasing order.
mapping(uint256 => Verifier[]) public legacyVerifiers;

/// @notice The latest used zkevm verifier.
Verifier public latestVerifier;

/// @notice The address of ScrollChain contract.
address public scrollChain;
/// @notice Mapping from verifier version to the latest used zkevm verifier.
mapping(uint256 => Verifier) public latestVerifier;

/***************
* Constructor *
***************/

constructor(address _verifier) {
require(_verifier != address(0), "zero verifier address");

latestVerifier.verifier = _verifier;
}

function initialize(address _scrollChain) external onlyOwner {
require(scrollChain == address(0), "initialized");

constructor(
address _scrollChain,
uint256[] memory _versions,
address[] memory _verifiers
) {
if (_scrollChain == address(0)) revert ErrorZeroAddress();
scrollChain = _scrollChain;

for (uint256 i = 0; i < _versions.length; i++) {
if (_verifiers[i] == address(0)) revert ErrorZeroAddress();
latestVerifier[_versions[i]].verifier = _verifiers[i];

emit UpdateVerifier(_versions[i], 0, _verifiers[i]);
}
}

/*************************
@@ -63,23 +86,24 @@ contract MultipleVersionRollupVerifier is IRollupVerifier, Ownable {
*************************/

/// @notice Return the number of legacy verifiers.
function legacyVerifiersLength() external view returns (uint256) {
return legacyVerifiers.length;
function legacyVerifiersLength(uint256 _version) external view returns (uint256) {
return legacyVerifiers[_version].length;
}

/// @notice Compute the verifier that should be used for a specific batch.
/// @param _version The version of verifier to query.
/// @param _batchIndex The batch index to query.
function getVerifier(uint256 _batchIndex) public view returns (address) {
function getVerifier(uint256 _version, uint256 _batchIndex) public view returns (address) {
// Normally, we will use the latest verifier.
Verifier memory _verifier = latestVerifier;
Verifier memory _verifier = latestVerifier[_version];

if (_verifier.startBatchIndex > _batchIndex) {
uint256 _length = legacyVerifiers.length;
uint256 _length = legacyVerifiers[_version].length;
// In most cases, only the last few verifiers will be used by `ScrollChain`.
// So, we use linear search instead of binary search.
unchecked {
for (uint256 i = _length; i > 0; --i) {
_verifier = legacyVerifiers[i - 1];
_verifier = legacyVerifiers[_version][i - 1];
if (_verifier.startBatchIndex <= _batchIndex) break;
}
}
@@ -98,7 +122,19 @@ contract MultipleVersionRollupVerifier is IRollupVerifier, Ownable {
bytes calldata _aggrProof,
bytes32 _publicInputHash
) external view override {
address _verifier = getVerifier(_batchIndex);
address _verifier = getVerifier(0, _batchIndex);

IZkEvmVerifier(_verifier).verify(_aggrProof, _publicInputHash);
}

/// @inheritdoc IRollupVerifier
function verifyAggregateProof(
uint256 _version,
uint256 _batchIndex,
bytes calldata _aggrProof,
bytes32 _publicInputHash
) external view override {
address _verifier = getVerifier(_version, _batchIndex);

IZkEvmVerifier(_verifier).verify(_aggrProof, _publicInputHash);
}
@@ -110,21 +146,29 @@ contract MultipleVersionRollupVerifier is IRollupVerifier, Ownable {
/// @notice Update the address of zkevm verifier.
/// @param _startBatchIndex The start batch index when the verifier will be used.
/// @param _verifier The address of new verifier.
function updateVerifier(uint64 _startBatchIndex, address _verifier) external onlyOwner {
require(_startBatchIndex > IScrollChain(scrollChain).lastFinalizedBatchIndex(), "start batch index finalized");
function updateVerifier(
uint256 _version,
uint64 _startBatchIndex,
address _verifier
) external onlyOwner {
if (_startBatchIndex <= IScrollChain(scrollChain).lastFinalizedBatchIndex())
revert ErrorStartBatchIndexFinalized();

Verifier memory _latestVerifier = latestVerifier;
require(_startBatchIndex >= _latestVerifier.startBatchIndex, "start batch index too small");
require(_verifier != address(0), "zero verifier address");
Verifier memory _latestVerifier = latestVerifier[_version];
if (_startBatchIndex < _latestVerifier.startBatchIndex) revert ErrorStartBatchIndexTooSmall();
if (_verifier == address(0)) revert ErrorZeroAddress();

if (_latestVerifier.startBatchIndex < _startBatchIndex) {
legacyVerifiers.push(_latestVerifier);
// don't push when it is the first update of the version.
if (_latestVerifier.verifier != address(0)) {
legacyVerifiers[_version].push(_latestVerifier);
}
_latestVerifier.startBatchIndex = _startBatchIndex;
}
_latestVerifier.verifier = _verifier;

latestVerifier = _latestVerifier;
latestVerifier[_version] = _latestVerifier;

emit UpdateVerifier(_startBatchIndex, _verifier);
emit UpdateVerifier(_version, _startBatchIndex, _verifier);
}
}
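With the verifier registry now keyed by version, callers pass the proof/codec version explicitly and the owner keeps an independent upgrade history per version. A minimal hedged sketch of exercising the new surface from a Hardhat test (typechain bindings assumed, like the other specs in this diff; `zkEvmVerifierV2` and the batch indices are placeholders):

// Deploy with one verifier registered for version 0 (mirrors deployMultipleVersionRollupVerifier above).
const MultipleVersionRollupVerifier = await ethers.getContractFactory("MultipleVersionRollupVerifier", deployer);
const rollupVerifier = await MultipleVersionRollupVerifier.deploy(
  await chain.getAddress(),              // ScrollChain address is fixed in the constructor; there is no initialize step anymore
  [0],                                   // versions
  [await zkEvmVerifierV1.getAddress()]   // verifiers
);

// Register a verifier for version 1 that takes effect from batch 100, then resolve per (version, batchIndex).
await rollupVerifier.updateVerifier(1, 100, await zkEvmVerifierV2.getAddress());
expect(await rollupVerifier.getVerifier(0, 50)).to.eq(await zkEvmVerifierV1.getAddress());
expect(await rollupVerifier.getVerifier(1, 150)).to.eq(await zkEvmVerifierV2.getAddress());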
@@ -1,6 +1,6 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
pragma solidity =0.8.16;
|
||||
pragma solidity =0.8.24;
|
||||
|
||||
import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
|
||||
import {PausableUpgradeable} from "@openzeppelin/contracts-upgradeable/security/PausableUpgradeable.sol";
|
||||
@@ -8,7 +8,9 @@ import {PausableUpgradeable} from "@openzeppelin/contracts-upgradeable/security/
|
||||
import {IL1MessageQueue} from "./IL1MessageQueue.sol";
|
||||
import {IScrollChain} from "./IScrollChain.sol";
|
||||
import {BatchHeaderV0Codec} from "../../libraries/codec/BatchHeaderV0Codec.sol";
|
||||
import {ChunkCodec} from "../../libraries/codec/ChunkCodec.sol";
|
||||
import {BatchHeaderV1Codec} from "../../libraries/codec/BatchHeaderV1Codec.sol";
|
||||
import {ChunkCodecV0} from "../../libraries/codec/ChunkCodecV0.sol";
|
||||
import {ChunkCodecV1} from "../../libraries/codec/ChunkCodecV1.sol";
|
||||
import {IRollupVerifier} from "../../libraries/verifier/IRollupVerifier.sol";
|
||||
|
||||
// solhint-disable no-inline-assembly
|
||||
@@ -18,28 +20,107 @@ import {IRollupVerifier} from "../../libraries/verifier/IRollupVerifier.sol";
|
||||
/// @notice This contract maintains data for the Scroll rollup.
contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
    /**********
     * Events *
     * Errors *
     **********/

    /// @notice Emitted when owner updates the status of sequencer.
    /// @param account The address of account updated.
    /// @param status The status of the account updated.
    event UpdateSequencer(address indexed account, bool status);
    /// @dev Thrown when the given account is not an EOA account.
    error ErrorAccountIsNotEOA();

    /// @notice Emitted when owner updates the status of prover.
    /// @param account The address of account updated.
    /// @param status The status of the account updated.
    event UpdateProver(address indexed account, bool status);
    /// @dev Thrown when committing a committed batch.
    error ErrorBatchIsAlreadyCommitted();

    /// @notice Emitted when the value of `maxNumTxInChunk` is updated.
    /// @param oldMaxNumTxInChunk The old value of `maxNumTxInChunk`.
    /// @param newMaxNumTxInChunk The new value of `maxNumTxInChunk`.
    event UpdateMaxNumTxInChunk(uint256 oldMaxNumTxInChunk, uint256 newMaxNumTxInChunk);
    /// @dev Thrown when finalizing a verified batch.
    error ErrorBatchIsAlreadyVerified();

    /// @dev Thrown when committing empty batch (batch without chunks)
    error ErrorBatchIsEmpty();

    /// @dev Thrown when call precompile failed.
    error ErrorCallPointEvaluationPrecompileFailed();

    /// @dev Thrown when the caller is not prover.
    error ErrorCallerIsNotProver();

    /// @dev Thrown when the caller is not sequencer.
    error ErrorCallerIsNotSequencer();

    /// @dev Thrown when the transaction has multiple blobs.
    error ErrorFoundMultipleBlob();

    /// @dev Thrown when some fields are not zero in genesis batch.
    error ErrorGenesisBatchHasNonZeroField();

    /// @dev Thrown when importing genesis batch twice.
    error ErrorGenesisBatchImported();

    /// @dev Thrown when data hash in genesis batch is zero.
    error ErrorGenesisDataHashIsZero();

    /// @dev Thrown when the parent batch hash in genesis batch is nonzero.
    error ErrorGenesisParentBatchHashIsNonZero();

    /// @dev Thrown when the l2 transaction is incomplete.
    error ErrorIncompleteL2TransactionData();

    /// @dev Thrown when the batch hash is incorrect.
    error ErrorIncorrectBatchHash();

    /// @dev Thrown when the batch index is incorrect.
    error ErrorIncorrectBatchIndex();

    /// @dev Thrown when the bitmap length is incorrect.
    error ErrorIncorrectBitmapLength();

    /// @dev Thrown when the previous state root doesn't match stored one.
    error ErrorIncorrectPreviousStateRoot();

    /// @dev Thrown when the batch header version is invalid.
    error ErrorInvalidBatchHeaderVersion();

    /// @dev Thrown when the last message is skipped.
    error ErrorLastL1MessageSkipped();

    /// @dev Thrown when no blob found in the transaction.
    error ErrorNoBlobFound();

    /// @dev Thrown when the number of transactions is less than number of L1 message in one block.
    error ErrorNumTxsLessThanNumL1Msgs();

    /// @dev Thrown when the given previous state root is zero.
    error ErrorPreviousStateRootIsZero();

    /// @dev Thrown when the number of batches to revert is zero.
    error ErrorRevertZeroBatches();

    /// @dev Thrown when the reverted batches are not in the ending of committed batch chain.
    error ErrorRevertNotStartFromEnd();

    /// @dev Thrown when reverting a finalized batch.
    error ErrorRevertFinalizedBatch();

    /// @dev Thrown when the given state root is zero.
    error ErrorStateRootIsZero();

    /// @dev Thrown when a chunk contains too many transactions.
    error ErrorTooManyTxsInOneChunk();

    /// @dev Thrown when the precompile output is incorrect.
    error ErrorUnexpectedPointEvaluationPrecompileOutput();

    /// @dev Thrown when the given address is `address(0)`.
    error ErrorZeroAddress();

    /*************
     * Constants *
     *************/

    /// @dev Address of the point evaluation precompile used for EIP-4844 blob verification.
    address constant POINT_EVALUATION_PRECOMPILE_ADDR = address(0x0A);

    /// @dev BLS Modulus value defined in EIP-4844 and the magic value returned from a successful call to the
    /// point evaluation precompile
    uint256 constant BLS_MODULUS = 52435875175126190479447740508185965837690552500527637822603658699938581184513;

    /// @notice The chain id of the corresponding layer 2 chain.
    uint64 public immutable layer2ChainId;

@@ -86,12 +167,12 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {

    modifier OnlySequencer() {
        // @note In the decentralized mode, it should be only called by a list of validator.
        require(isSequencer[_msgSender()], "caller not sequencer");
        if (!isSequencer[_msgSender()]) revert ErrorCallerIsNotSequencer();
        _;
    }

    modifier OnlyProver() {
        require(isProver[_msgSender()], "caller not prover");
        if (!isProver[_msgSender()]) revert ErrorCallerIsNotProver();
        _;
    }

@@ -157,23 +238,23 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
    /// @notice Import layer 2 genesis block
    function importGenesisBatch(bytes calldata _batchHeader, bytes32 _stateRoot) external {
        // check genesis batch header length
        require(_stateRoot != bytes32(0), "zero state root");
        if (_stateRoot == bytes32(0)) revert ErrorStateRootIsZero();

        // check whether the genesis batch is imported
        require(finalizedStateRoots[0] == bytes32(0), "Genesis batch imported");
        if (finalizedStateRoots[0] != bytes32(0)) revert ErrorGenesisBatchImported();

        (uint256 memPtr, bytes32 _batchHash) = _loadBatchHeader(_batchHeader);
        (uint256 memPtr, bytes32 _batchHash, , ) = _loadBatchHeader(_batchHeader);

        // check all fields except `dataHash` and `lastBlockHash` are zero
        unchecked {
            uint256 sum = BatchHeaderV0Codec.version(memPtr) +
                BatchHeaderV0Codec.batchIndex(memPtr) +
                BatchHeaderV0Codec.l1MessagePopped(memPtr) +
                BatchHeaderV0Codec.totalL1MessagePopped(memPtr);
            require(sum == 0, "not all fields are zero");
            uint256 sum = BatchHeaderV0Codec.getVersion(memPtr) +
                BatchHeaderV0Codec.getBatchIndex(memPtr) +
                BatchHeaderV0Codec.getL1MessagePopped(memPtr) +
                BatchHeaderV0Codec.getTotalL1MessagePopped(memPtr);
            if (sum != 0) revert ErrorGenesisBatchHasNonZeroField();
        }
        require(BatchHeaderV0Codec.dataHash(memPtr) != bytes32(0), "zero data hash");
        require(BatchHeaderV0Codec.parentBatchHash(memPtr) == bytes32(0), "nonzero parent batch hash");
        if (BatchHeaderV0Codec.getDataHash(memPtr) == bytes32(0)) revert ErrorGenesisDataHashIsZero();
        if (BatchHeaderV0Codec.getParentBatchHash(memPtr) != bytes32(0)) revert ErrorGenesisParentBatchHashIsNonZero();

        committedBatches[0] = _batchHash;
        finalizedStateRoots[0] = _stateRoot;
@@ -189,89 +270,80 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
        bytes[] memory _chunks,
        bytes calldata _skippedL1MessageBitmap
    ) external override OnlySequencer whenNotPaused {
        require(_version == 0, "invalid version");

        // check whether the batch is empty
        uint256 _chunksLength = _chunks.length;
        require(_chunksLength > 0, "batch is empty");
        if (_chunks.length == 0) revert ErrorBatchIsEmpty();

        // The overall memory layout in this function is organized as follows
        // +---------------------+-------------------+------------------+
        // | parent batch header | chunk data hashes | new batch header |
        // +---------------------+-------------------+------------------+
        // ^                     ^                   ^
        // batchPtr              dataPtr             newBatchPtr (re-use var batchPtr)
        //
        // 1. We copy the parent batch header from calldata to memory starting at batchPtr
        // 2. We store `_chunksLength` number of Keccak hashes starting at `dataPtr`. Each Keccak
        //    hash corresponds to the data hash of a chunk. So we reserve the memory region from
        //    `dataPtr` to `dataPtr + _chunkLength * 32` for the chunk data hashes.
        // 3. The memory starting at `newBatchPtr` is used to store the new batch header and compute
        //    the batch hash.

        // the variable `batchPtr` will be reused later for the current batch
        (uint256 batchPtr, bytes32 _parentBatchHash) = _loadBatchHeader(_parentBatchHeader);

        uint256 _batchIndex = BatchHeaderV0Codec.batchIndex(batchPtr);
        uint256 _totalL1MessagesPoppedOverall = BatchHeaderV0Codec.totalL1MessagePopped(batchPtr);
        require(committedBatches[_batchIndex] == _parentBatchHash, "incorrect parent batch hash");
        require(committedBatches[_batchIndex + 1] == 0, "batch already committed");

        // load `dataPtr` and reserve the memory region for chunk data hashes
        uint256 dataPtr;
        assembly {
            dataPtr := mload(0x40)
            mstore(0x40, add(dataPtr, mul(_chunksLength, 32)))
        (, bytes32 _parentBatchHash, uint256 _batchIndex, uint256 _totalL1MessagesPoppedOverall) = _loadBatchHeader(
            _parentBatchHeader
        );
        unchecked {
            _batchIndex += 1;
        }
        if (committedBatches[_batchIndex] != 0) revert ErrorBatchIsAlreadyCommitted();

        // compute the data hash for each chunk
        bytes32 _batchHash;
        uint256 batchPtr;
        bytes32 _dataHash;
        uint256 _totalL1MessagesPoppedInBatch;
        for (uint256 i = 0; i < _chunksLength; i++) {
            uint256 _totalNumL1MessagesInChunk = _commitChunk(
                dataPtr,
                _chunks[i],
                _totalL1MessagesPoppedInBatch,
        if (_version == 0) {
            (_dataHash, _totalL1MessagesPoppedInBatch) = _commitChunksV0(
                _totalL1MessagesPoppedOverall,
                _chunks,
                _skippedL1MessageBitmap
            );

            unchecked {
                _totalL1MessagesPoppedInBatch += _totalNumL1MessagesInChunk;
                _totalL1MessagesPoppedOverall += _totalNumL1MessagesInChunk;
                dataPtr += 32;
            assembly {
                batchPtr := mload(0x40)
                _totalL1MessagesPoppedOverall := add(_totalL1MessagesPoppedOverall, _totalL1MessagesPoppedInBatch)
            }
            // store entries, the order matters
            BatchHeaderV0Codec.storeVersion(batchPtr, 0);
            BatchHeaderV0Codec.storeBatchIndex(batchPtr, _batchIndex);
            BatchHeaderV0Codec.storeL1MessagePopped(batchPtr, _totalL1MessagesPoppedInBatch);
            BatchHeaderV0Codec.storeTotalL1MessagePopped(batchPtr, _totalL1MessagesPoppedOverall);
            BatchHeaderV0Codec.storeDataHash(batchPtr, _dataHash);
            BatchHeaderV0Codec.storeParentBatchHash(batchPtr, _parentBatchHash);
            BatchHeaderV0Codec.storeSkippedBitmap(batchPtr, _skippedL1MessageBitmap);
            // compute batch hash
            _batchHash = BatchHeaderV0Codec.computeBatchHash(
                batchPtr,
                BatchHeaderV0Codec.BATCH_HEADER_FIXED_LENGTH + _skippedL1MessageBitmap.length
            );
        } else if (_version == 1) {
            bytes32 blobVersionedHash;
            (blobVersionedHash, _dataHash, _totalL1MessagesPoppedInBatch) = _commitChunksV1(
                _totalL1MessagesPoppedOverall,
                _chunks,
                _skippedL1MessageBitmap
            );
            assembly {
                batchPtr := mload(0x40)
                _totalL1MessagesPoppedOverall := add(_totalL1MessagesPoppedOverall, _totalL1MessagesPoppedInBatch)
            }
            // store entries, the order matters
            BatchHeaderV1Codec.storeVersion(batchPtr, 1);
            BatchHeaderV1Codec.storeBatchIndex(batchPtr, _batchIndex);
            BatchHeaderV1Codec.storeL1MessagePopped(batchPtr, _totalL1MessagesPoppedInBatch);
            BatchHeaderV1Codec.storeTotalL1MessagePopped(batchPtr, _totalL1MessagesPoppedOverall);
            BatchHeaderV1Codec.storeDataHash(batchPtr, _dataHash);
            BatchHeaderV1Codec.storeBlobVersionedHash(batchPtr, blobVersionedHash);
            BatchHeaderV1Codec.storeParentBatchHash(batchPtr, _parentBatchHash);
            BatchHeaderV1Codec.storeSkippedBitmap(batchPtr, _skippedL1MessageBitmap);
            // compute batch hash
            _batchHash = BatchHeaderV1Codec.computeBatchHash(
                batchPtr,
                BatchHeaderV1Codec.BATCH_HEADER_FIXED_LENGTH + _skippedL1MessageBitmap.length
            );
        } else {
            revert ErrorInvalidBatchHeaderVersion();
        }

        // check the length of bitmap
        unchecked {
            require(
                ((_totalL1MessagesPoppedInBatch + 255) / 256) * 32 == _skippedL1MessageBitmap.length,
                "wrong bitmap length"
            );
            if (((_totalL1MessagesPoppedInBatch + 255) / 256) * 32 != _skippedL1MessageBitmap.length) {
                revert ErrorIncorrectBitmapLength();
            }
        }
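        // Editor's note (illustrative, not part of the diff): a worked example of the bitmap
        // length check above, assuming the bitmap packs one bit per L1 message into 256-bit words.
        // For a batch that pops 300 L1 messages:
        //   ((300 + 255) / 256) * 32 = (555 / 256) * 32 = 2 * 32 = 64 bytes,
        // so `_skippedL1MessageBitmap` must be exactly 64 bytes (two words); any other length now
        // reverts with ErrorIncorrectBitmapLength() instead of the old "wrong bitmap length" require.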

        // compute the data hash for current batch
        bytes32 _dataHash;
        assembly {
            let dataLen := mul(_chunksLength, 0x20)
            _dataHash := keccak256(sub(dataPtr, dataLen), dataLen)

            batchPtr := mload(0x40) // reset batchPtr
            _batchIndex := add(_batchIndex, 1) // increase batch index
        }

        // store entries, the order matters
        BatchHeaderV0Codec.storeVersion(batchPtr, _version);
        BatchHeaderV0Codec.storeBatchIndex(batchPtr, _batchIndex);
        BatchHeaderV0Codec.storeL1MessagePopped(batchPtr, _totalL1MessagesPoppedInBatch);
        BatchHeaderV0Codec.storeTotalL1MessagePopped(batchPtr, _totalL1MessagesPoppedOverall);
        BatchHeaderV0Codec.storeDataHash(batchPtr, _dataHash);
        BatchHeaderV0Codec.storeParentBatchHash(batchPtr, _parentBatchHash);
        BatchHeaderV0Codec.storeSkippedBitmap(batchPtr, _skippedL1MessageBitmap);

        // compute batch hash
        bytes32 _batchHash = BatchHeaderV0Codec.computeBatchHash(batchPtr, 89 + _skippedL1MessageBitmap.length);

        committedBatches[_batchIndex] = _batchHash;
        emit CommitBatch(_batchIndex, _batchHash);
    }
@@ -280,18 +352,14 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
    /// @dev If the owner wants to revert a sequence of batches by sending multiple transactions,
    /// make sure to revert recent batches first.
    function revertBatch(bytes calldata _batchHeader, uint256 _count) external onlyOwner {
        require(_count > 0, "count must be nonzero");
        if (_count == 0) revert ErrorRevertZeroBatches();

        (uint256 memPtr, bytes32 _batchHash) = _loadBatchHeader(_batchHeader);

        // check batch hash
        uint256 _batchIndex = BatchHeaderV0Codec.batchIndex(memPtr);
        require(committedBatches[_batchIndex] == _batchHash, "incorrect batch hash");
        (, bytes32 _batchHash, uint256 _batchIndex, ) = _loadBatchHeader(_batchHeader);
        // make sure no gap is left when reverting from the ending to the beginning.
        require(committedBatches[_batchIndex + _count] == bytes32(0), "reverting must start from the ending");
        if (committedBatches[_batchIndex + _count] != bytes32(0)) revert ErrorRevertNotStartFromEnd();

        // check finalization
        require(_batchIndex > lastFinalizedBatchIndex, "can only revert unfinalized batch");
        if (_batchIndex <= lastFinalizedBatchIndex) revert ErrorRevertFinalizedBatch();

        while (_count > 0) {
            committedBatches[_batchIndex] = bytes32(0);
@@ -309,6 +377,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
    }

    /// @inheritdoc IScrollChain
    /// @dev We keep this function to upgrade to 4844 more smoothly.
    function finalizeBatchWithProof(
        bytes calldata _batchHeader,
        bytes32 _prevStateRoot,
@@ -316,21 +385,18 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
        bytes32 _withdrawRoot,
        bytes calldata _aggrProof
    ) external override OnlyProver whenNotPaused {
        require(_prevStateRoot != bytes32(0), "previous state root is zero");
        require(_postStateRoot != bytes32(0), "new state root is zero");
        if (_prevStateRoot == bytes32(0)) revert ErrorPreviousStateRootIsZero();
        if (_postStateRoot == bytes32(0)) revert ErrorStateRootIsZero();

        // compute batch hash and verify
        (uint256 memPtr, bytes32 _batchHash) = _loadBatchHeader(_batchHeader);

        bytes32 _dataHash = BatchHeaderV0Codec.dataHash(memPtr);
        uint256 _batchIndex = BatchHeaderV0Codec.batchIndex(memPtr);
        require(committedBatches[_batchIndex] == _batchHash, "incorrect batch hash");
        (uint256 memPtr, bytes32 _batchHash, uint256 _batchIndex, ) = _loadBatchHeader(_batchHeader);
        bytes32 _dataHash = BatchHeaderV0Codec.getDataHash(memPtr);

        // verify previous state root.
        require(finalizedStateRoots[_batchIndex - 1] == _prevStateRoot, "incorrect previous state root");
        if (finalizedStateRoots[_batchIndex - 1] != _prevStateRoot) revert ErrorIncorrectPreviousStateRoot();

        // avoid duplicated verification
        require(finalizedStateRoots[_batchIndex] == bytes32(0), "batch already verified");
        if (finalizedStateRoots[_batchIndex] != bytes32(0)) revert ErrorBatchIsAlreadyVerified();

        // compute public input hash
        bytes32 _publicInputHash = keccak256(
@@ -338,11 +404,11 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
        );

        // verify batch
        IRollupVerifier(verifier).verifyAggregateProof(_batchIndex, _aggrProof, _publicInputHash);
        IRollupVerifier(verifier).verifyAggregateProof(0, _batchIndex, _aggrProof, _publicInputHash);

        // check and update lastFinalizedBatchIndex
        unchecked {
            require(lastFinalizedBatchIndex + 1 == _batchIndex, "incorrect batch index");
            if (lastFinalizedBatchIndex + 1 != _batchIndex) revert ErrorIncorrectBatchIndex();
            lastFinalizedBatchIndex = _batchIndex;
        }

@@ -351,27 +417,93 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
        withdrawRoots[_batchIndex] = _withdrawRoot;

        // Pop finalized and non-skipped message from L1MessageQueue.
        uint256 _l1MessagePopped = BatchHeaderV0Codec.l1MessagePopped(memPtr);
        if (_l1MessagePopped > 0) {
            IL1MessageQueue _queue = IL1MessageQueue(messageQueue);
        _popL1Messages(
            BatchHeaderV0Codec.getSkippedBitmapPtr(memPtr),
            BatchHeaderV0Codec.getTotalL1MessagePopped(memPtr),
            BatchHeaderV0Codec.getL1MessagePopped(memPtr)
        );

            unchecked {
                uint256 _startIndex = BatchHeaderV0Codec.totalL1MessagePopped(memPtr) - _l1MessagePopped;
        emit FinalizeBatch(_batchIndex, _batchHash, _postStateRoot, _withdrawRoot);
    }

                for (uint256 i = 0; i < _l1MessagePopped; i += 256) {
                    uint256 _count = 256;
                    if (_l1MessagePopped - i < _count) {
                        _count = _l1MessagePopped - i;
                    }
                    uint256 _skippedBitmap = BatchHeaderV0Codec.skippedBitmap(memPtr, i / 256);
    /// @inheritdoc IScrollChain
    /// @dev Memory layout of `_blobDataProof`:
    /// ```text
    /// | z       | y       | kzg_commitment | kzg_proof |
    /// |---------|---------|----------------|-----------|
    /// | bytes32 | bytes32 | bytes48        | bytes48   |
    /// ```
    function finalizeBatchWithProof4844(
        bytes calldata _batchHeader,
        bytes32 _prevStateRoot,
        bytes32 _postStateRoot,
        bytes32 _withdrawRoot,
        bytes calldata _blobDataProof,
        bytes calldata _aggrProof
    ) external override OnlyProver whenNotPaused {
        if (_prevStateRoot == bytes32(0)) revert ErrorPreviousStateRootIsZero();
        if (_postStateRoot == bytes32(0)) revert ErrorStateRootIsZero();

                    _queue.popCrossDomainMessage(_startIndex, _count, _skippedBitmap);
        // compute batch hash and verify
        (uint256 memPtr, bytes32 _batchHash, uint256 _batchIndex, ) = _loadBatchHeader(_batchHeader);
        bytes32 _dataHash = BatchHeaderV1Codec.getDataHash(memPtr);
        bytes32 _blobVersionedHash = BatchHeaderV1Codec.getBlobVersionedHash(memPtr);

                    _startIndex += 256;
                }
            }
        // Calls the point evaluation precompile and verifies the output
        {
            (bool success, bytes memory data) = POINT_EVALUATION_PRECOMPILE_ADDR.staticcall(
                abi.encodePacked(_blobVersionedHash, _blobDataProof)
            );
            // We verify that the point evaluation precompile call was successful by testing the latter 32 bytes of the
            // response is equal to BLS_MODULUS as defined in https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
            if (!success) revert ErrorCallPointEvaluationPrecompileFailed();
            (, uint256 result) = abi.decode(data, (uint256, uint256));
            if (result != BLS_MODULUS) revert ErrorUnexpectedPointEvaluationPrecompileOutput();
        }
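        // Editor's note (illustrative sketch, not part of the diff): per EIP-4844 the point
        // evaluation precompile at address 0x0A expects a 192-byte input
        //   versioned_hash (32) | z (32) | y (32) | kzg_commitment (48) | kzg_proof (48)
        // which is exactly what `abi.encodePacked(_blobVersionedHash, _blobDataProof)` produces
        // given the `_blobDataProof` layout documented above. On success it returns
        // abi.encode(FIELD_ELEMENTS_PER_BLOB, BLS_MODULUS), which is why the second decoded word
        // is compared against BLS_MODULUS here.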

        // verify previous state root.
        if (finalizedStateRoots[_batchIndex - 1] != _prevStateRoot) revert ErrorIncorrectPreviousStateRoot();

        // avoid duplicated verification
        if (finalizedStateRoots[_batchIndex] != bytes32(0)) revert ErrorBatchIsAlreadyVerified();

        // compute public input hash
        bytes32 _publicInputHash = keccak256(
            abi.encodePacked(
                layer2ChainId,
                _prevStateRoot,
                _postStateRoot,
                _withdrawRoot,
                _dataHash,
                _blobDataProof[0:64]
            )
        );
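        // Editor's note (illustrative, not part of the diff): `_blobDataProof[0:64]` is the
        // (z, y) pair from the layout documented above, i.e. the claimed evaluation point and
        // value of the blob polynomial. Including it in the public input hash binds the blob
        // checked by the point evaluation precompile to the data the aggregate proof commits to.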

        // load version from batch header, it is always the first byte.
        uint256 batchVersion;
        assembly {
            batchVersion := shr(248, calldataload(_batchHeader.offset))
        }
        // verify batch
        IRollupVerifier(verifier).verifyAggregateProof(batchVersion, _batchIndex, _aggrProof, _publicInputHash);

        // check and update lastFinalizedBatchIndex
        unchecked {
            if (lastFinalizedBatchIndex + 1 != _batchIndex) revert ErrorIncorrectBatchIndex();
            lastFinalizedBatchIndex = _batchIndex;
        }

        // record state root and withdraw root
        finalizedStateRoots[_batchIndex] = _postStateRoot;
        withdrawRoots[_batchIndex] = _withdrawRoot;

        // Pop finalized and non-skipped message from L1MessageQueue.
        _popL1Messages(
            BatchHeaderV1Codec.getSkippedBitmapPtr(memPtr),
            BatchHeaderV1Codec.getTotalL1MessagePopped(memPtr),
            BatchHeaderV1Codec.getL1MessagePopped(memPtr)
        );

        emit FinalizeBatch(_batchIndex, _batchHash, _postStateRoot, _withdrawRoot);
    }

@@ -384,7 +516,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
    function addSequencer(address _account) external onlyOwner {
        // @note Currently many external services rely on EOA sequencer to decode metadata directly from tx.calldata.
        // So we explicitly make sure the account is EOA.
        require(_account.code.length == 0, "not EOA");
        if (_account.code.length > 0) revert ErrorAccountIsNotEOA();

        isSequencer[_account] = true;

@@ -404,7 +536,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
    function addProver(address _account) external onlyOwner {
        // @note Currently many external services rely on EOA prover to decode metadata directly from tx.calldata.
        // So we explicitly make sure the account is EOA.
        require(_account.code.length == 0, "not EOA");
        if (_account.code.length > 0) revert ErrorAccountIsNotEOA();
        isProver[_account] = true;

        emit UpdateProver(_account, true);
@@ -441,56 +573,195 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
     * Internal Functions *
     **********************/

    /// @dev Internal function to load batch header from calldata to memory.
    /// @param _batchHeader The batch header in calldata.
    /// @return memPtr The start memory offset of loaded batch header.
    /// @return _batchHash The hash of the loaded batch header.
    function _loadBatchHeader(bytes calldata _batchHeader) internal pure returns (uint256 memPtr, bytes32 _batchHash) {
        // load to memory
        uint256 _length;
        (memPtr, _length) = BatchHeaderV0Codec.loadAndValidate(_batchHeader);
    /// @dev Internal function to commit chunks with version 0
    /// @param _totalL1MessagesPoppedOverall The number of L1 messages popped before the list of chunks.
    /// @param _chunks The list of chunks to commit.
    /// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not.
    /// @return _batchDataHash The computed data hash for the list of chunks.
    /// @return _totalL1MessagesPoppedInBatch The total number of L1 messages popped in this batch, including skipped one.
    function _commitChunksV0(
        uint256 _totalL1MessagesPoppedOverall,
        bytes[] memory _chunks,
        bytes calldata _skippedL1MessageBitmap
    ) internal view returns (bytes32 _batchDataHash, uint256 _totalL1MessagesPoppedInBatch) {
        uint256 _chunksLength = _chunks.length;

        // compute batch hash
        _batchHash = BatchHeaderV0Codec.computeBatchHash(memPtr, _length);
        // load `batchDataHashPtr` and reserve the memory region for chunk data hashes
        uint256 batchDataHashPtr;
        assembly {
            batchDataHashPtr := mload(0x40)
            mstore(0x40, add(batchDataHashPtr, mul(_chunksLength, 32)))
        }

        // compute the data hash for each chunk
        for (uint256 i = 0; i < _chunksLength; i++) {
            uint256 _totalNumL1MessagesInChunk;
            bytes32 _chunkDataHash;
            (_chunkDataHash, _totalNumL1MessagesInChunk) = _commitChunkV0(
                _chunks[i],
                _totalL1MessagesPoppedInBatch,
                _totalL1MessagesPoppedOverall,
                _skippedL1MessageBitmap
            );
            unchecked {
                _totalL1MessagesPoppedInBatch += _totalNumL1MessagesInChunk;
                _totalL1MessagesPoppedOverall += _totalNumL1MessagesInChunk;
            }
            assembly {
                mstore(batchDataHashPtr, _chunkDataHash)
                batchDataHashPtr := add(batchDataHashPtr, 0x20)
            }
        }

        assembly {
            let dataLen := mul(_chunksLength, 0x20)
            _batchDataHash := keccak256(sub(batchDataHashPtr, dataLen), dataLen)
        }
    }

    /// @dev Internal function to commit a chunk.
    /// @param memPtr The start memory offset to store list of `dataHash`.
    /// @param _chunk The encoded chunk to commit.
    /// @param _totalL1MessagesPoppedInBatch The total number of L1 messages popped in current batch.
    /// @param _totalL1MessagesPoppedOverall The total number of L1 messages popped in all batches including current batch.
    /// @dev Internal function to commit chunks with version 1
    /// @param _totalL1MessagesPoppedOverall The number of L1 messages popped before the list of chunks.
    /// @param _chunks The list of chunks to commit.
    /// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not.
    /// @return _blobVersionedHash The blob versioned hash for the blob carried in this transaction.
    /// @return _batchDataHash The computed data hash for the list of chunks.
    /// @return _totalL1MessagesPoppedInBatch The total number of L1 messages popped in this batch, including skipped one.
    function _commitChunksV1(
        uint256 _totalL1MessagesPoppedOverall,
        bytes[] memory _chunks,
        bytes calldata _skippedL1MessageBitmap
    )
        internal
        view
        returns (
            bytes32 _blobVersionedHash,
            bytes32 _batchDataHash,
            uint256 _totalL1MessagesPoppedInBatch
        )
    {
        {
            bytes32 _secondBlob;
            // Get blob's versioned hash
            assembly {
                _blobVersionedHash := blobhash(0)
                _secondBlob := blobhash(1)
            }
            if (_blobVersionedHash == bytes32(0)) revert ErrorNoBlobFound();
            if (_secondBlob != bytes32(0)) revert ErrorFoundMultipleBlob();
        }
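        // Editor's note (illustrative, not part of the diff): `blobhash(i)` is the Cancun
        // BLOBHASH opcode; it returns the versioned hash of the i-th blob attached to the
        // current transaction, or zero when there is no such blob. Reading index 0 and 1 is
        // therefore enough to enforce "exactly one blob": zero at index 0 means no blob,
        // non-zero at index 1 means more than one.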

        uint256 _chunksLength = _chunks.length;

        // load `batchDataHashPtr` and reserve the memory region for chunk data hashes
        uint256 batchDataHashPtr;
        assembly {
            batchDataHashPtr := mload(0x40)
            mstore(0x40, add(batchDataHashPtr, mul(_chunksLength, 32)))
        }

        // compute the data hash for each chunk
        for (uint256 i = 0; i < _chunksLength; i++) {
            uint256 _totalNumL1MessagesInChunk;
            bytes32 _chunkDataHash;
            (_chunkDataHash, _totalNumL1MessagesInChunk) = _commitChunkV1(
                _chunks[i],
                _totalL1MessagesPoppedInBatch,
                _totalL1MessagesPoppedOverall,
                _skippedL1MessageBitmap
            );
            unchecked {
                _totalL1MessagesPoppedInBatch += _totalNumL1MessagesInChunk;
                _totalL1MessagesPoppedOverall += _totalNumL1MessagesInChunk;
            }
            assembly {
                mstore(batchDataHashPtr, _chunkDataHash)
                batchDataHashPtr := add(batchDataHashPtr, 0x20)
            }
        }

        // compute the data hash for current batch
        assembly {
            let dataLen := mul(_chunksLength, 0x20)
            _batchDataHash := keccak256(sub(batchDataHashPtr, dataLen), dataLen)
        }
    }

    /// @dev Internal function to load batch header from calldata to memory.
    /// @param _batchHeader The batch header in calldata.
    /// @return batchPtr The start memory offset of loaded batch header.
    /// @return _batchHash The hash of the loaded batch header.
    /// @return _batchIndex The index of this batch.
    /// @return _totalL1MessagesPoppedOverall The number of L1 messages popped after this batch.
    function _loadBatchHeader(bytes calldata _batchHeader)
        internal
        view
        returns (
            uint256 batchPtr,
            bytes32 _batchHash,
            uint256 _batchIndex,
            uint256 _totalL1MessagesPoppedOverall
        )
    {
        // load version from batch header, it is always the first byte.
        uint256 version;
        assembly {
            version := shr(248, calldataload(_batchHeader.offset))
        }

        // version should be always 0 or 1 in current code
        uint256 _length;
        if (version == 0) {
            (batchPtr, _length) = BatchHeaderV0Codec.loadAndValidate(_batchHeader);
            _batchHash = BatchHeaderV0Codec.computeBatchHash(batchPtr, _length);
            _batchIndex = BatchHeaderV0Codec.getBatchIndex(batchPtr);
        } else if (version == 1) {
            (batchPtr, _length) = BatchHeaderV1Codec.loadAndValidate(_batchHeader);
            _batchHash = BatchHeaderV1Codec.computeBatchHash(batchPtr, _length);
            _batchIndex = BatchHeaderV1Codec.getBatchIndex(batchPtr);
        } else {
            revert ErrorInvalidBatchHeaderVersion();
        }
        // only check when genesis is imported
        if (committedBatches[_batchIndex] != _batchHash && finalizedStateRoots[0] != bytes32(0)) {
            revert ErrorIncorrectBatchHash();
        }
        _totalL1MessagesPoppedOverall = BatchHeaderV0Codec.getTotalL1MessagePopped(batchPtr);
    }
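    // Editor's note (illustrative, not part of the diff): the batch header version is the first
    // byte of `_batchHeader`, so `shr(248, calldataload(_batchHeader.offset))` shifts the loaded
    // 32-byte word right by 248 bits and keeps only that byte. A header starting with 0x00 is
    // decoded with BatchHeaderV0Codec, one starting with 0x01 with BatchHeaderV1Codec; any other
    // first byte reverts with ErrorInvalidBatchHeaderVersion().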

    /// @dev Internal function to commit a chunk with version 0.
    /// @param _chunk The encoded chunk to commit.
    /// @param _totalL1MessagesPoppedInBatch The total number of L1 messages popped in the current batch before this chunk.
    /// @param _totalL1MessagesPoppedOverall The total number of L1 messages popped in all batches including the current batch, before this chunk.
    /// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not.
    /// @return _dataHash The computed data hash for this chunk.
    /// @return _totalNumL1MessagesInChunk The total number of L1 message popped in current chunk
    function _commitChunk(
        uint256 memPtr,
    function _commitChunkV0(
        bytes memory _chunk,
        uint256 _totalL1MessagesPoppedInBatch,
        uint256 _totalL1MessagesPoppedOverall,
        bytes calldata _skippedL1MessageBitmap
    ) internal view returns (uint256 _totalNumL1MessagesInChunk) {
    ) internal view returns (bytes32 _dataHash, uint256 _totalNumL1MessagesInChunk) {
        uint256 chunkPtr;
        uint256 startDataPtr;
        uint256 dataPtr;
        uint256 blockPtr;

        assembly {
            dataPtr := mload(0x40)
            startDataPtr := dataPtr
            chunkPtr := add(_chunk, 0x20) // skip chunkLength
            blockPtr := add(chunkPtr, 1) // skip numBlocks
        }

        uint256 _numBlocks = ChunkCodec.validateChunkLength(chunkPtr, _chunk.length);
        uint256 _numBlocks = ChunkCodecV0.validateChunkLength(chunkPtr, _chunk.length);

        // concatenate block contexts, use scope to avoid stack too deep
        {
            uint256 _totalTransactionsInChunk;
            for (uint256 i = 0; i < _numBlocks; i++) {
                dataPtr = ChunkCodec.copyBlockContext(chunkPtr, dataPtr, i);
                uint256 _numTransactionsInBlock = ChunkCodec.numTransactions(blockPtr);
                dataPtr = ChunkCodecV0.copyBlockContext(chunkPtr, dataPtr, i);
                uint256 blockPtr = chunkPtr + 1 + i * ChunkCodecV0.BLOCK_CONTEXT_LENGTH;
                uint256 _numTransactionsInBlock = ChunkCodecV0.getNumTransactions(blockPtr);
                unchecked {
                    _totalTransactionsInChunk += _numTransactionsInBlock;
                    blockPtr += ChunkCodec.BLOCK_CONTEXT_LENGTH;
                }
            }
            assembly {
@@ -499,17 +770,13 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
        }

        // It is used to compute the actual number of transactions in chunk.
        uint256 txHashStartDataPtr;
        assembly {
            txHashStartDataPtr := dataPtr
            blockPtr := add(chunkPtr, 1) // reset block ptr
        }

        uint256 txHashStartDataPtr = dataPtr;
        // concatenate tx hashes
        uint256 l2TxPtr = ChunkCodec.l2TxPtr(chunkPtr, _numBlocks);
        uint256 l2TxPtr = ChunkCodecV0.getL2TxPtr(chunkPtr, _numBlocks);
        chunkPtr += 1;
        while (_numBlocks > 0) {
            // concatenate l1 message hashes
            uint256 _numL1MessagesInBlock = ChunkCodec.numL1Messages(blockPtr);
            uint256 _numL1MessagesInBlock = ChunkCodecV0.getNumL1Messages(chunkPtr);
            dataPtr = _loadL1MessageHashes(
                dataPtr,
                _numL1MessagesInBlock,
@@ -519,11 +786,11 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
            );

            // concatenate l2 transaction hashes
            uint256 _numTransactionsInBlock = ChunkCodec.numTransactions(blockPtr);
            require(_numTransactionsInBlock >= _numL1MessagesInBlock, "num txs less than num L1 msgs");
            uint256 _numTransactionsInBlock = ChunkCodecV0.getNumTransactions(chunkPtr);
            if (_numTransactionsInBlock < _numL1MessagesInBlock) revert ErrorNumTxsLessThanNumL1Msgs();
            for (uint256 j = _numL1MessagesInBlock; j < _numTransactionsInBlock; j++) {
                bytes32 txHash;
                (txHash, l2TxPtr) = ChunkCodec.loadL2TxHash(l2TxPtr);
                (txHash, l2TxPtr) = ChunkCodecV0.loadL2TxHash(l2TxPtr);
                assembly {
                    mstore(dataPtr, txHash)
                    dataPtr := add(dataPtr, 0x20)
@@ -536,23 +803,99 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
                _totalL1MessagesPoppedOverall += _numL1MessagesInBlock;

                _numBlocks -= 1;
                blockPtr += ChunkCodec.BLOCK_CONTEXT_LENGTH;
                chunkPtr += ChunkCodecV0.BLOCK_CONTEXT_LENGTH;
            }
        }

        // check the actual number of transactions in the chunk
        require((dataPtr - txHashStartDataPtr) / 32 <= maxNumTxInChunk, "too many txs in one chunk");
        if ((dataPtr - txHashStartDataPtr) / 32 > maxNumTxInChunk) revert ErrorTooManyTxsInOneChunk();

        assembly {
            chunkPtr := add(_chunk, 0x20)
        }
        // check chunk has correct length
        require(l2TxPtr - chunkPtr == _chunk.length, "incomplete l2 transaction data");
        if (l2TxPtr - chunkPtr != _chunk.length) revert ErrorIncompleteL2TransactionData();

        // compute data hash and store to memory
        assembly {
            let dataHash := keccak256(startDataPtr, sub(dataPtr, startDataPtr))
            mstore(memPtr, dataHash)
            _dataHash := keccak256(startDataPtr, sub(dataPtr, startDataPtr))
        }
    }

    /// @dev Internal function to commit a chunk with version 1.
    /// @param _chunk The encoded chunk to commit.
    /// @param _totalL1MessagesPoppedInBatch The total number of L1 messages popped in current batch.
    /// @param _totalL1MessagesPoppedOverall The total number of L1 messages popped in all batches including current batch.
    /// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not.
    /// @return _dataHash The computed data hash for this chunk.
    /// @return _totalNumL1MessagesInChunk The total number of L1 message popped in current chunk
    function _commitChunkV1(
        bytes memory _chunk,
        uint256 _totalL1MessagesPoppedInBatch,
        uint256 _totalL1MessagesPoppedOverall,
        bytes calldata _skippedL1MessageBitmap
    ) internal view returns (bytes32 _dataHash, uint256 _totalNumL1MessagesInChunk) {
        uint256 chunkPtr;
        uint256 startDataPtr;
        uint256 dataPtr;

        assembly {
            dataPtr := mload(0x40)
            startDataPtr := dataPtr
            chunkPtr := add(_chunk, 0x20) // skip chunkLength
        }

        return _totalNumL1MessagesInChunk;
        uint256 _numBlocks = ChunkCodecV1.validateChunkLength(chunkPtr, _chunk.length);
        // concatenate block contexts, use scope to avoid stack too deep
        for (uint256 i = 0; i < _numBlocks; i++) {
            dataPtr = ChunkCodecV1.copyBlockContext(chunkPtr, dataPtr, i);
            uint256 blockPtr = chunkPtr + 1 + i * ChunkCodecV1.BLOCK_CONTEXT_LENGTH;
            uint256 _numL1MessagesInBlock = ChunkCodecV1.getNumL1Messages(blockPtr);
            unchecked {
                _totalNumL1MessagesInChunk += _numL1MessagesInBlock;
            }
        }
        assembly {
            mstore(0x40, add(dataPtr, mul(_totalNumL1MessagesInChunk, 0x20))) // reserve memory for l1 message hashes
            chunkPtr := add(chunkPtr, 1)
        }

        // the number of actual transactions in one chunk: non-skipped l1 messages + l2 txs
        uint256 _totalTransactionsInChunk;
        // concatenate tx hashes
        while (_numBlocks > 0) {
            // concatenate l1 message hashes
            uint256 _numL1MessagesInBlock = ChunkCodecV1.getNumL1Messages(chunkPtr);
            uint256 startPtr = dataPtr;
            dataPtr = _loadL1MessageHashes(
                dataPtr,
                _numL1MessagesInBlock,
                _totalL1MessagesPoppedInBatch,
                _totalL1MessagesPoppedOverall,
                _skippedL1MessageBitmap
            );
            uint256 _numTransactionsInBlock = ChunkCodecV1.getNumTransactions(chunkPtr);
            if (_numTransactionsInBlock < _numL1MessagesInBlock) revert ErrorNumTxsLessThanNumL1Msgs();
            unchecked {
                _totalTransactionsInChunk += (dataPtr - startPtr) / 32; // number of non-skipped l1 messages
                _totalTransactionsInChunk += _numTransactionsInBlock - _numL1MessagesInBlock; // number of l2 txs
                _totalL1MessagesPoppedInBatch += _numL1MessagesInBlock;
                _totalL1MessagesPoppedOverall += _numL1MessagesInBlock;

                _numBlocks -= 1;
                chunkPtr += ChunkCodecV1.BLOCK_CONTEXT_LENGTH;
            }
        }

        // check the actual number of transactions in the chunk
        if (_totalTransactionsInChunk > maxNumTxInChunk) {
            revert ErrorTooManyTxsInOneChunk();
        }

        // compute data hash and store to memory
        assembly {
            _dataHash := keccak256(startDataPtr, sub(dataPtr, startDataPtr))
        }
    }

    /// @dev Internal function to load L1 message hashes from the message queue.
@@ -600,9 +943,39 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {

            // check last L1 message is not skipped, _totalL1MessagesPoppedInBatch must > 0
            rem = (_totalL1MessagesPoppedInBatch - 1) & 0xff;
            require(((_bitmap >> rem) & 1) == 0, "cannot skip last L1 message");
            if (((_bitmap >> rem) & 1) > 0) revert ErrorLastL1MessageSkipped();
        }

        return _ptr;
    }
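    // Editor's note (worked example, not part of the diff): in the skip check above,
    // `rem = (_totalL1MessagesPoppedInBatch - 1) & 0xff` is the bit position of the last popped
    // message inside the current 256-bit bitmap word. If the batch has popped 300 messages so
    // far, the last one is message 299, whose bit sits at position 299 & 0xff = 43 of the second
    // word; if that bit is set the message was skipped and the call reverts with
    // ErrorLastL1MessageSkipped().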

    /// @dev Internal function to pop finalized l1 messages.
    /// @param bitmapPtr The memory offset of `skippedL1MessageBitmap`.
    /// @param totalL1MessagePopped The total number of L1 messages popped in all batches including current batch.
    /// @param l1MessagePopped The number of L1 messages popped in current batch.
    function _popL1Messages(
        uint256 bitmapPtr,
        uint256 totalL1MessagePopped,
        uint256 l1MessagePopped
    ) internal {
        if (l1MessagePopped == 0) return;

        unchecked {
            uint256 startIndex = totalL1MessagePopped - l1MessagePopped;
            uint256 bitmap;

            for (uint256 i = 0; i < l1MessagePopped; i += 256) {
                uint256 _count = 256;
                if (l1MessagePopped - i < _count) {
                    _count = l1MessagePopped - i;
                }
                assembly {
                    bitmap := mload(bitmapPtr)
                    bitmapPtr := add(bitmapPtr, 0x20)
                }
                IL1MessageQueue(messageQueue).popCrossDomainMessage(startIndex, _count, bitmap);
                startIndex += 256;
            }
        }
    }
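    // Editor's note (worked example, not part of the diff): `_popL1Messages` walks the skipped
    // bitmap one 256-bit word at a time. If a batch pops 600 L1 messages starting at queue index
    // `startIndex`, the loop issues three `popCrossDomainMessage` calls with counts 256, 256 and
    // 88, advancing `startIndex` by 256 and `bitmapPtr` by 32 bytes after each call.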
}

@@ -1,6 +1,6 @@
// SPDX-License-Identifier: MIT

pragma solidity =0.8.16;
pragma solidity =0.8.24;

import {IScrollChain} from "./IScrollChain.sol";
import {ZkTrieVerifier} from "../../libraries/verifier/ZkTrieVerifier.sol";
