Compare commits

...

38 Commits

Author SHA1 Message Date
ChuhanJin
8d667f9353 fix(bridge-history-api): fix wrong table name (#584)
Co-authored-by: vincent <419436363@qq.com>
2023-06-16 21:23:38 +08:00
Xi Lin
57a058c516 feat(contracts): add multiple version for rollup verifier (#549)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-16 19:50:13 +08:00
ChuhanJin
55612a0dbb fix(bridge-history-api): Avoid cors issue in bridgehistoryapi-server (#583)
Co-authored-by: vincent <419436363@qq.com>
2023-06-16 19:42:12 +08:00
Richord
a8b2706752 feat(batch proposer): implemented l1msgtx fields in batch header (#567)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2023-06-16 10:54:24 +02:00
georgehao
d9ae117548 docs: update README.md (#578)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-16 15:21:16 +08:00
ChuhanJin
de2669da2b fix(bridge-history-api): upgrade and fix misc issues (#580)
Co-authored-by: vincent <419436363@qq.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: georgehao <haohongfan@gmail.com>
2023-06-16 14:53:45 +08:00
HAOYUatHZ
b1d7654970 docs: update README.md (#577) 2023-06-16 09:07:14 +08:00
HAOYUatHZ
a6164046e1 build: clean up jenkins related files (#576) 2023-06-16 09:05:52 +08:00
georgehao
119e62d4b1 feat(codecov): Add scroll unit test coverage tool (#574) 2023-06-15 22:53:56 +08:00
Xi Lin
16e0cbf542 fix(contracts): bug fixing based on openzeppelin's audit (#558)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-15 17:17:32 +08:00
Péter Garamvölgyi
bfaf2fd0e2 feat(batch proposer): Support skipping L1 messages in block/chunk encoding (#569) 2023-06-15 09:49:01 +02:00
Xi Lin
ea227b5c85 feat(bridge): add function to decode batch range from calldata with old abi (#568)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-15 14:28:16 +08:00
Richord
86b67f6dad feat(batch proposer): chunk encoding using L1MsgTxs (#557)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2023-06-13 10:08:07 -07:00
georgehao
711d17d1e8 fix(bridge): fix l2 watcher tests (#565)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2023-06-13 21:11:03 +08:00
Xi Lin
87c81c6555 feat(contracts): add prover role in scroll chain (#559)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-13 21:03:56 +08:00
ChuhanJin
7118746e34 fix(bridge-history-api): change hostport as configurable to support and misc fixes (#564)
Co-authored-by: vincent <419436363@qq.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-13 20:23:28 +08:00
Péter Garamvölgyi
f180a40ed9 fix(batch proposer): limit the number of batches in a single commitBatches transaction (#555)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-09 23:27:29 +08:00
Richord
fc577a535e feat(batch proposer): Batch header encoding and hashing without L1MessageTx (#543)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-09 23:21:35 +08:00
Péter Garamvölgyi
8b018327f2 feat(contracts): add fallback contract deployment script (#556) 2023-06-09 23:12:22 +08:00
HAOYUatHZ
98cb624951 ci: fix intermediate docker Makefile (#554) 2023-06-09 14:14:46 +08:00
HAOYUatHZ
9dba3f8f41 ci: update intermediate dockerfiles (#553) 2023-06-09 13:50:54 +08:00
Xi Lin
fb44382297 fix(contracts): fix chunk hash computation (#546)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-08 08:56:00 +08:00
maskpp
2a745ad7a9 build: Update golang-ci version to v1.52.2 and go version to v1.19.x (#525)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <haoyu@protonmail.com>
2023-06-07 23:07:14 +08:00
Max Wolff
7270a2a9bc style(contracts): add linter precommit hook for .ts and .sol files (#484)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2023-06-07 13:21:35 +02:00
HAOYUatHZ
20fafd9ca4 docs: remove workflow status badges (#547) 2023-06-07 16:11:28 +08:00
HAOYUatHZ
870565a18f docs: add Integration Tests & Bridge History API CI badges to README (#545) 2023-06-07 13:56:30 +08:00
HAOYUatHZ
0319b11a57 refactor: de-duplicate library path (#544) 2023-06-07 13:41:37 +08:00
HAOYUatHZ
6d4374a5cc ci: move tests from jenkins to github (#535)
Co-authored-by: maskpp <maskpp266@gmail.com>
2023-06-07 13:36:36 +08:00
Richord
7ca897d887 feat(batch proposer): implement chunk hashing (#536)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: georgehao <haohongfan@gmail.com>
2023-06-06 10:51:37 -07:00
HAOYUatHZ
f414523045 fix: fix github "Intermediate Docker" workflow syntax (#541) 2023-06-06 10:36:04 +08:00
HAOYUatHZ
99ba4acdc0 fix: fix github "Intermediate Docker" workflow (#540) 2023-06-06 10:27:25 +08:00
HAOYUatHZ
9caa3d491e build: add custom build scripts for intermediate dockers (#538) 2023-06-06 10:15:50 +08:00
ChuhanJin
81589c55f5 feat(bridge-history-api): add blocktimestamp into cross_msg (#526)
Co-authored-by: vincent <419436363@qq.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-05 22:52:42 +08:00
ChuhanJin
d234156475 feat(bridge-history-api): add withdrawtrie (#537)
Co-authored-by: vincent <419436363@qq.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-06-05 22:45:21 +08:00
Ahmed Castro
62f7cbad46 refactor: add missing functions into IL1GatewayRouter (#533)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2023-06-05 11:36:51 +02:00
Richord
186e76852b feat(batch proposer): implement chunk encoding (#519) 2023-06-04 08:35:48 -07:00
Péter Garamvölgyi
3b045817dd ci: update PR template (#534) 2023-06-04 11:08:50 +08:00
ChuhanJin
e7dc628563 fix(jenkins): fix can not find gocover (#529)
Co-authored-by: vincent <419436363@qq.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-05-31 21:23:58 +08:00
116 changed files with 14685 additions and 800 deletions


@@ -1,9 +1,9 @@
## 1. Purpose or design rationale of this PR
### Purpose or design rationale of this PR
...
*Describe your change. Make sure to answer these three questions: What does this PR do? Why does it do it? How does it do it?*
## 2. PR title
### PR title
Your PR title must follow [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/#summary) (as we are doing squash merge for each PR), so it must start with one of the following [types](https://github.com/angular/angular/blob/22b96b9/CONTRIBUTING.md#type):
@@ -18,17 +18,17 @@ Your PR title must follow [conventional commits](https://www.conventionalcommits
- [ ] test: Adding missing tests or correcting existing tests
## 3. Deployment tag versioning
### Deployment tag versioning
Has `tag` in `common/version.go` been updated?
- [ ] This PR doesn't involve a new deployment, git tag, docker image tag
- [ ] No, this PR doesn't involve a new deployment, git tag, docker image tag
- [ ] Yes
## 4. Breaking change label
### Breaking change label
Does this PR have the `breaking-change` label?
- [ ] This PR is not a breaking change
- [ ] No, this PR is not a breaking change
- [ ] Yes


@@ -9,6 +9,8 @@ on:
- alpha
paths:
- 'bridge/**'
- 'common/**'
- 'database/**'
- '.github/workflows/bridge.yml'
pull_request:
types:
@@ -18,14 +20,61 @@ on:
- ready_for_review
paths:
- 'bridge/**'
- 'common/**'
- 'database/**'
- '.github/workflows/bridge.yml'
defaults:
run:
working-directory: 'bridge'
jobs:
check:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
name: 'solc'
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Lint
working-directory: 'bridge'
run: |
rm -rf $HOME/.cache/golangci-lint
make mock_abi
make lint
goimports-lint:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- name: Run goimports lint
run: goimports -local scroll-tech/bridge/ -w .
working-directory: 'bridge'
- name: Run go mod tidy
run: go mod tidy
working-directory: 'bridge'
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
working-directory: 'bridge'
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
fi
tests:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
@@ -43,31 +92,24 @@ jobs:
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Lint
- name: Build prerequisites
run: |
rm -rf $HOME/.cache/golangci-lint
make mock_abi
make lint
goimports-lint:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
make dev_docker
make -C bridge mock_abi
- name: Build bridge binaries
working-directory: 'bridge'
run: |
make bridge_bins
- name: Test bridge packages
working-directory: 'bridge'
run: |
go test -v -race -gcflags="-l" -ldflags="-s=false" -coverprofile=coverage.txt -covermode=atomic ./...
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- run: goimports -local scroll-tech/bridge/ -w .
- run: go mod tidy
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
fi
flags: bridge
# docker-build:
# if: github.event.pull_request.draft == false
# runs-on: ubuntu-latest


@@ -1,4 +1,4 @@
name: BridgeHistoryApi
name: BridgeHistoryAPI
on:
push:
@@ -32,7 +32,7 @@ jobs:
# - name: Install Go
# uses: actions/setup-go@v2
# with:
# go-version: 1.20.x
# go-version: 1.19.x
# - name: Checkout code
# uses: actions/checkout@v2
# - name: Lint
@@ -46,13 +46,19 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.20.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Test
run: |
go get ./...
make test
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: bridge-history-api
goimports-lint:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
@@ -60,7 +66,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.20.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
@@ -73,4 +79,3 @@ jobs:
if [ -n "$(git status --porcelain)" ]; then
exit 1
fi


@@ -20,10 +20,6 @@ on:
- 'common/**'
- '.github/workflows/common.yml'
defaults:
run:
working-directory: 'common'
jobs:
check:
if: github.event.pull_request.draft == false
@@ -37,7 +33,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Cache cargo
@@ -45,6 +41,7 @@ jobs:
with:
workspaces: "common/libzkp/impl -> target"
- name: Lint
working-directory: 'common'
run: |
rm -rf $HOME/.cache/golangci-lint
make lint
@@ -55,16 +52,52 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- run: goimports -local scroll-tech/common/ -w .
- run: go mod tidy
- name: Run goimports lint
working-directory: 'common'
run: goimports -local scroll-tech/common/ -w .
- name: Run go mod tidy
working-directory: 'common'
run: go mod tidy
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
working-directory: 'common'
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
fi
tests:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
name: 'solc'
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites
run: |
make dev_docker
- name: Test common packages
working-directory: 'common'
run: |
go test -v -race -gcflags="-l" -ldflags="-s=false" -coverprofile=coverage.txt -covermode=atomic ./...
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: common


@@ -9,6 +9,8 @@ on:
- alpha
paths:
- 'coordinator/**'
- 'common/**'
- 'database/**'
- '.github/workflows/coordinator.yml'
pull_request:
types:
@@ -18,12 +20,10 @@ on:
- ready_for_review
paths:
- 'coordinator/**'
- 'common/**'
- 'database/**'
- '.github/workflows/coordinator.yml'
defaults:
run:
working-directory: 'coordinator'
jobs:
check:
if: github.event.pull_request.draft == false
@@ -37,10 +37,11 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Lint
working-directory: 'coordinator'
run: |
rm -rf $HOME/.cache/golangci-lint
make lint
@@ -51,15 +52,20 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- run: goimports -local scroll-tech/coordinator/ -w .
- run: go mod tidy
- name: Run goimports lint
working-directory: 'coordinator'
run: goimports -local scroll-tech/coordinator/ -w .
- name: Run go mod tidy
working-directory: 'coordinator'
run: go mod tidy
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
working-directory: 'coordinator'
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
@@ -80,3 +86,35 @@ jobs:
# push: false
# # cache-from: type=gha,scope=${{ github.workflow }}
# # cache-to: type=gha,scope=${{ github.workflow }}
tests:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
name: 'solc'
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites
run: |
make dev_docker
- name: Test coordinator packages
working-directory: 'coordinator'
run: |
# go test -exec "env LD_LIBRARY_PATH=${PWD}/verifier/lib" -v -race -gcflags="-l" -ldflags="-s=false" -coverpkg="scroll-tech/coordinator" -coverprofile=coverage.txt -covermode=atomic ./...
go test -v -race -gcflags="-l" -ldflags="-s=false" -coverprofile=coverage.txt -covermode=atomic -tags mock_verifier ./...
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: coordinator


@@ -9,6 +9,7 @@ on:
- alpha
paths:
- 'database/**'
- 'common/**'
- '.github/workflows/database.yml'
pull_request:
types:
@@ -18,12 +19,9 @@ on:
- ready_for_review
paths:
- 'database/**'
- 'common/**'
- '.github/workflows/database.yml'
defaults:
run:
working-directory: 'database'
jobs:
check:
if: github.event.pull_request.draft == false
@@ -32,10 +30,11 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Lint
working-directory: 'database'
run: |
rm -rf $HOME/.cache/golangci-lint
make lint
@@ -46,16 +45,52 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- run: goimports -local scroll-tech/database/ -w .
- run: go mod tidy
- name: Run goimports lint
working-directory: 'database'
run: goimports -local scroll-tech/database/ -w .
- name: Run go mod tidy
working-directory: 'database'
run: go mod tidy
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
working-directory: 'database'
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
fi
tests:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
name: 'solc'
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites
run: |
make dev_docker
- name: Test database packages
working-directory: 'database'
run: |
go test -v -race -gcflags="-l" -ldflags="-s=false" -coverprofile=coverage.txt -covermode=atomic ./...
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: database

.github/workflows/integration.yaml

@@ -0,0 +1,43 @@
name: Integration
on:
push:
branches:
- main
- staging
- develop
- alpha
pull_request:
types:
- opened
- reopened
- synchronize
- ready_for_review
jobs:
tests:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
name: 'solc'
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites
run: |
make dev_docker
make -C bridge mock_abi
make -C common/bytecode all
- name: Run integration tests
run: |
go test -v -tags="mock_prover mock_verifier" -p 1 -coverprofile=coverage.txt scroll-tech/integration-test/...


@@ -0,0 +1,59 @@
name: Intermediate Docker
on:
workflow_dispatch:
inputs:
GO_VERSION:
description: 'Go version'
required: true
type: string
default: '1.19'
RUST_VERSION:
description: 'Rust toolchain version'
required: true
type: string
default: 'nightly-2022-12-10'
PYTHON_VERSION:
description: 'Python version'
required: false
type: string
default: '3.10'
CUDA_VERSION:
description: 'Cuda version'
required: false
type: string
default: '11.7.1'
defaults:
run:
working-directory: 'build/dockerfiles/intermediate'
jobs:
build-and-push:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build
run: |
make all
env:
GO_VERSION: ${{ inputs.GO_VERSION }}
RUST_VERSION: ${{ inputs.RUST_VERSION }}
PYTHON_VERSION: ${{ inputs.PYTHON_VERSION }}
CUDA_VERSION: ${{ inputs.CUDA_VERSION }}
- name: Publish
run: |
make publish
env:
GO_VERSION: ${{ inputs.GO_VERSION }}
RUST_VERSION: ${{ inputs.RUST_VERSION }}
PYTHON_VERSION: ${{ inputs.PYTHON_VERSION }}
CUDA_VERSION: ${{ inputs.CUDA_VERSION }}


@@ -37,7 +37,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Cache cargo
@@ -47,7 +47,13 @@ jobs:
- name: Test
run: |
make roller
go test -tags="mock_prover" -v ./...
go test -tags="mock_prover" -v -coverprofile=coverage.txt ./...
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: roller
check:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
@@ -55,7 +61,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Lint
@@ -69,7 +75,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports

Jenkinsfile

@@ -1,99 +0,0 @@
imagePrefix = 'scrolltech'
credentialDocker = 'dockerhub'
pipeline {
agent any
options {
timeout (20)
}
tools {
go 'go-1.18'
nodejs "nodejs"
}
environment {
GO111MODULE = 'on'
PATH="/home/ubuntu/.cargo/bin:$PATH"
LD_LIBRARY_PATH="$LD_LIBRARY_PATH:./coordinator/verifier/lib"
CHAIN_ID='534353'
// LOG_DOCKER = 'true'
}
stages {
stage('Build') {
parallel {
stage('Build Prerequisite') {
steps {
sh 'make dev_docker'
sh 'make -C bridge mock_abi'
sh 'make -C common/bytecode all'
}
}
stage('Check Bridge Compilation') {
steps {
sh 'make -C bridge bridge_bins'
}
}
stage('Check Coordinator Compilation') {
steps {
sh 'export PATH=/home/ubuntu/go/bin:$PATH'
sh 'make -C coordinator coordinator'
}
}
stage('Check Database Compilation') {
steps {
sh 'make -C database db_cli'
}
}
stage('Check Database Docker Build') {
steps {
sh 'make -C database docker'
}
}
}
}
stage('Parallel Test') {
parallel{
stage('Race test common package') {
steps {
sh 'go test -v -race -coverprofile=coverage.common.txt -covermode=atomic scroll-tech/common/...'
}
}
stage('Race test bridge package') {
steps {
sh "cd ./bridge && ../build/run_tests.sh bridge"
}
}
stage('Race test coordinator package') {
steps {
sh 'cd ./coordinator && go test -exec "env LD_LIBRARY_PATH=${PWD}/verifier/lib" -v -race -gcflags="-l" -ldflags="-s=false" -coverpkg="scroll-tech/coordinator" -coverprofile=../coverage.coordinator.txt -covermode=atomic ./...'
}
}
stage('Race test database package') {
steps {
sh 'go test -v -race -coverprofile=coverage.db.txt -covermode=atomic scroll-tech/database/...'
}
}
stage('Integration test') {
steps {
sh 'go test -v -tags="mock_prover mock_verifier" -p 1 scroll-tech/integration-test/...'
}
}
}
}
stage('Compare Coverage') {
steps {
sh "./build/post-test-report-coverage.sh"
script {
currentBuild.result = 'SUCCESS'
}
step([$class: 'CompareCoverageAction', publishResultAs: 'Comment', scmVars: [GIT_URL: env.GIT_URL]])
}
}
}
post {
always {
publishCoverage adapters: [coberturaReportAdapter(path: 'cobertura.xml', thresholds: [[thresholdTarget: 'Aggregated Report', unhealthyThreshold: 40.0]])], checksName: '', sourceFileResolver: sourceFiles('NEVER_STORE')
cleanWs()
slackSend(message: "${JOB_BASE_NAME} ${GIT_COMMIT} #${BUILD_NUMBER} deploy ${currentBuild.result}")
}
}
}


@@ -1,9 +1,9 @@
# Scroll Monorepo
[![Contracts](https://github.com/scroll-tech/scroll/actions/workflows/contracts.yaml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/contracts.yaml) [![Bridge](https://github.com/scroll-tech/scroll/actions/workflows/bridge.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/bridge.yml) [![Coordinator](https://github.com/scroll-tech/scroll/actions/workflows/coordinator.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/coordinator.yml) [![Database](https://github.com/scroll-tech/scroll/actions/workflows/database.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/database.yml) [![Common](https://github.com/scroll-tech/scroll/actions/workflows/common.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/common.yml) [![Roller](https://github.com/scroll-tech/scroll/actions/workflows/roller.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/roller.yml)
[![codecov](https://codecov.io/gh/scroll-tech/scroll/branch/develop/graph/badge.svg?token=VJVHNQWGGW)](https://codecov.io/gh/scroll-tech/scroll)
## Prerequisites
+ Go 1.18
+ Go 1.19
+ Rust (for version, see [rust-toolchain](./common/libzkp/impl/rust-toolchain))
+ Hardhat / Foundry
+ Docker


@@ -325,3 +325,26 @@ type L2FailedRelayedMessageEvent struct {
type L2RelayedMessageEvent struct {
MessageHash common.Hash
}
// IScrollChainBatch is an auto generated low-level Go binding around an user-defined struct.
type IScrollChainBatch struct {
Blocks []IScrollChainBlockContext
PrevStateRoot common.Hash
NewStateRoot common.Hash
WithdrawTrieRoot common.Hash
BatchIndex uint64
ParentBatchHash common.Hash
L2Transactions []byte
}
// IScrollChainBlockContext is an auto generated low-level Go binding around an user-defined struct.
type IScrollChainBlockContext struct {
BlockHash common.Hash
ParentHash common.Hash
BlockNumber uint64
Timestamp uint64
BaseFee *big.Int
GasLimit uint64
NumTransactions uint16
NumL1Messages uint16
}


@@ -5,6 +5,7 @@ import (
"os"
"github.com/ethereum/go-ethereum/log"
"github.com/iris-contrib/middleware/cors"
"github.com/kataras/iris/v12"
"github.com/kataras/iris/v12/mvc"
"github.com/urfave/cli/v2"
@@ -60,6 +61,11 @@ func init() {
}
func action(ctx *cli.Context) error {
corsOptions := cors.New(cors.Options{
AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "POST", "PUT", "DELETE"},
AllowCredentials: true,
})
// Load config file.
cfgFile := ctx.String(cutils.ConfigFileFlag.Name)
cfg, err := config.NewConfig(cfgFile)
@@ -72,13 +78,14 @@ func action(ctx *cli.Context) error {
}
defer database.Close()
bridgeApp := iris.New()
bridgeApp.UseRouter(corsOptions)
bridgeApp.Get("/ping", pong).Describe("healthcheck")
mvc.Configure(bridgeApp.Party("/api/txs"), setupQueryByAddressHandler)
mvc.Configure(bridgeApp.Party("/api/txsbyhashes"), setupQueryByHashHandler)
// TODO: make debug mode configurable
err = bridgeApp.Listen(":8080", iris.WithLogLevel("debug"))
err = bridgeApp.Listen(cfg.Server.HostPort, iris.WithLogLevel("debug"))
if err != nil {
log.Crit("can not start server", "err", err)
}


@@ -98,6 +98,14 @@ func action(ctx *cli.Context) error {
go l2crossMsgFetcher.Start()
defer l2crossMsgFetcher.Stop()
l1BlocktimeFetcher := cross_msg.NewBlocktimestampFetcher(subCtx, uint(cfg.L1.Confirmation), int(cfg.L1.BlockTime), l1client, db.UpdateL1Blocktimestamp, db.GetL1EarliestNoBlocktimestampHeight)
go l1BlocktimeFetcher.Start()
defer l1BlocktimeFetcher.Stop()
l2BlocktimeFetcher := cross_msg.NewBlocktimestampFetcher(subCtx, uint(cfg.L2.Confirmation), int(cfg.L2.BlockTime), l2client, db.UpdateL2Blocktimestamp, db.GetL2EarliestNoBlocktimestampHeight)
go l2BlocktimeFetcher.Start()
defer l2BlocktimeFetcher.Stop()
// Catch CTRL-C to ensure a graceful shutdown.
interrupt := make(chan os.Signal, 1)
signal.Notify(interrupt, os.Interrupt)


@@ -2,33 +2,36 @@
"l1": {
"confirmation": 64,
"endpoint": "https://rpc.ankr.com/eth_goerli",
"startHeight": 8890194 ,
"startHeight": 9090194 ,
"blockTime": 10,
"MessengerAddr": "0x5260e38080BFe97e6C4925d9209eCc5f964373b6",
"ETHGatewayAddr": "0x429b73A21cF3BF1f3E696a21A95408161daF311f",
"WETHGatewayAddr": "0x8be69E499D8848DfFb4cF9bac909f3e2cF2FeFa0",
"StandardERC20Gateway": "0xeF37207c1A1efF6D6a9d7BfF3cF4270e406d319b",
"CustomERC20GatewayAddr": "0x920f906B814597cF5DC76F95100F09CBAF9c5748",
"ERC721GatewayAddr": "0x1C441Dfc5C2eD7A2AA8636748A664E59CB029157",
"ERC1155GatewayAddr": "0xd1bE599aaCBC21448fD6373bbc7c1b4c7806f135"
"MessengerAddr": "0x326517Eb8eB1Ce5eaB5b513C2e9A24839b402d90",
"ETHGatewayAddr": "0x8305cB7B8448677736095965B63d7431017328fe",
"WETHGatewayAddr": "0xe3bA3c60d99a2d9a5f817734bC85353470b23931",
"StandardERC20Gateway": "0x16c1079B27eD9c363B7D08aC5Ae937A398972A5C",
"CustomERC20GatewayAddr": "0x61f08caD3d6F77801167d3bA8669433701586643",
"ERC721GatewayAddr": "0x4A73D25A4C99CB912acaf6C5B5e554f2982201c5",
"ERC1155GatewayAddr": "0xa3F5DD3033698c2832C53f3C3Fe6E062F58cD808"
},
"l2": {
"confirmation": 1,
"endpoint": "https://alpha-rpc.scroll.io/l2",
"endpoint": "http://staging-l2geth-rpc0.scroll.tech:8545",
"blockTime": 3,
"startHeight": 1600068,
"CustomERC20GatewayAddr": "0xa07Cb742657294C339fB4d5d6CdF3fdBeE8C1c68",
"ERC721GatewayAddr": "0x8Fee20e0C0Ef16f2898a8073531a857D11b9C700",
"StandardERC20Gateway": "0xB878F37BB278bf0e4974856fFe86f5e6F66BD725",
"MessengerAddr": "0xb75d7e84517e1504C151B270255B087Fd746D34C",
"ETHGatewayAddr": "0x32139B5C8838E94fFcD83E60dff95Daa7F0bA14c",
"WETHGatewayAddr": "0xBb88bF582F2BBa46702621dae5CB9271057bC85b",
"ERC1155GatewayAddr": "0x2946cB860028276b3C4bccE1767841641C2E0828"
"startHeight": 0,
"CustomERC20GatewayAddr": "0x905db21f836749fEeD12de781afc4A5Ab4Dd0d51",
"ERC721GatewayAddr": "0xC53D835514780664BCd7eCfcE7c2E5d9554dc41B",
"StandardERC20Gateway": "0x90271634BCB020e06ea4840C3f7aa61b8F860651",
"MessengerAddr": "0xE8b0956Ac75c65Aa1669e83888DA13afF2E108f4",
"ETHGatewayAddr": "0xD5938590D5dD8ce95812D4D515a219C12C551D67",
"WETHGatewayAddr": "0xb0aaA582564fade4232a16fdB1383004A6A7247F",
"ERC1155GatewayAddr": "0x4f33B1655619c2C0B7C450128Df760B4365Cb549"
},
"db": {
"dsn": "postgres://postgres:1234@localhost:5444/test?sslmode=disable",
"driverName": "postgres",
"maxOpenNum": 200,
"maxIdleNum": 20
},
"server": {
"hostPort": "0.0.0.0:20006"
}
}


@@ -30,6 +30,10 @@ type LayerConfig struct {
CustomERC20GatewayAddr string `json:"CustomERC20GatewayAddr"`
}
type ServerConfig struct {
HostPort string `json:"hostPort"`
}
// Config is the configuration of the bridge history backend
type Config struct {
// chain config
@@ -37,7 +41,8 @@ type Config struct {
L2 *LayerConfig `json:"l2"`
// data source name
DB *DBConfig `json:"db"`
DB *DBConfig `json:"db"`
Server *ServerConfig `json:"server"`
}
// NewConfig returns a new instance of Config.


@@ -16,7 +16,7 @@ type QueryHashController struct {
}
func (c *QueryAddressController) Get(req model.QueryByAddressRequest) (*model.QueryByAddressResponse, error) {
message, err := c.Service.GetTxsByAddress(common.HexToAddress(req.Address), int64(req.Offset), int64(req.Limit))
message, total, err := c.Service.GetTxsByAddress(common.HexToAddress(req.Address), int64(req.Offset), int64(req.Limit))
if err != nil {
return &model.QueryByAddressResponse{Message: "500", Data: &model.Data{}}, err
}
@@ -24,7 +24,7 @@ func (c *QueryAddressController) Get(req model.QueryByAddressRequest) (*model.Qu
return &model.QueryByAddressResponse{Message: "ok",
Data: &model.Data{
Result: message,
Total: len(message),
Total: total,
}}, nil
}


@@ -0,0 +1,79 @@
package cross_msg
import (
"context"
"math/big"
"time"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/log"
)
type GetEarliestNoBlocktimestampHeightFunc func() (uint64, error)
type UpdateBlocktimestampFunc func(height uint64, timestamp time.Time) error
type BlocktimestampFetcher struct {
ctx context.Context
confirmation uint
blockTimeInSec int
client *ethclient.Client
updateBlocktimestampFunc UpdateBlocktimestampFunc
getEarliestNoBlocktimestampHeightFunc GetEarliestNoBlocktimestampHeightFunc
}
func NewBlocktimestampFetcher(ctx context.Context, confirmation uint, blockTimeInSec int, client *ethclient.Client, updateBlocktimestampFunc UpdateBlocktimestampFunc, getEarliestNoBlocktimestampHeightFunc GetEarliestNoBlocktimestampHeightFunc) *BlocktimestampFetcher {
return &BlocktimestampFetcher{
ctx: ctx,
confirmation: confirmation,
blockTimeInSec: blockTimeInSec,
client: client,
getEarliestNoBlocktimestampHeightFunc: getEarliestNoBlocktimestampHeightFunc,
updateBlocktimestampFunc: updateBlocktimestampFunc,
}
}
func (b *BlocktimestampFetcher) Start() {
go func() {
tick := time.NewTicker(time.Duration(b.blockTimeInSec) * time.Second)
for {
select {
case <-b.ctx.Done():
tick.Stop()
return
case <-tick.C:
number, err := b.client.BlockNumber(b.ctx)
if err != nil {
log.Error("Can not get latest block number", "err", err)
continue
}
startHeight, err := b.getEarliestNoBlocktimestampHeightFunc()
if err != nil {
log.Error("Can not get latest record without block timestamp", "err", err)
continue
}
for height := startHeight; number >= height+uint64(b.confirmation) && height > 0; {
block, err := b.client.HeaderByNumber(b.ctx, new(big.Int).SetUint64(height))
if err != nil {
log.Error("Can not get block by number", "err", err)
break
}
err = b.updateBlocktimestampFunc(height, time.Unix(int64(block.Time), 0))
if err != nil {
log.Error("Can not update blocktimstamp into DB ", "err", err)
break
}
height, err = b.getEarliestNoBlocktimestampHeightFunc()
if err != nil {
log.Error("Can not get latest record without block timestamp", "err", err)
break
}
}
}
}
}()
}
func (b *BlocktimestampFetcher) Stop() {
log.Info("BlocktimestampFetcher Stop")
b.ctx.Done()
}


@@ -1,7 +1,6 @@
package cross_msg_test
import (
"bridge-history-api/cross_msg"
"crypto/rand"
"math/big"
"testing"
@@ -9,6 +8,8 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/stretchr/testify/assert"
"bridge-history-api/cross_msg"
)
func TestMergeIntoList(t *testing.T) {


@@ -0,0 +1,190 @@
package cross_msg
import (
"github.com/ethereum/go-ethereum/common"
"bridge-history-api/utils"
)
// MaxHeight is the maximum possible height of the withdraw trie
const MaxHeight = 40
// WithdrawTrie is an append-only merkle trie
type WithdrawTrie struct {
// used to rebuild the merkle tree
NextMessageNonce uint64
height int // current height of withdraw trie
branches []common.Hash
zeroes []common.Hash
}
// NewWithdrawTrie will return a new instance of WithdrawTrie
func NewWithdrawTrie() *WithdrawTrie {
zeroes := make([]common.Hash, MaxHeight)
branches := make([]common.Hash, MaxHeight)
zeroes[0] = common.Hash{}
for i := 1; i < MaxHeight; i++ {
zeroes[i] = utils.Keccak2(zeroes[i-1], zeroes[i-1])
}
return &WithdrawTrie{
zeroes: zeroes,
branches: branches,
height: -1,
NextMessageNonce: 0,
}
}
// Initialize will initialize the merkle trie with the rightmost leaf node
func (w *WithdrawTrie) Initialize(currentMessageNonce uint64, msgHash common.Hash, proofBytes []byte) {
proof := DecodeBytesToMerkleProof(proofBytes)
branches := RecoverBranchFromProof(proof, currentMessageNonce, msgHash)
w.height = len(proof)
w.branches = branches
w.NextMessageNonce = currentMessageNonce + 1
}
// AppendMessages appends a list of new messages as leaf nodes to the rightmost side of the tree and returns the proofs for all messages.
func (w *WithdrawTrie) AppendMessages(hashes []common.Hash) [][]byte {
length := len(hashes)
if length == 0 {
return make([][]byte, 0)
}
cache := make([]map[uint64]common.Hash, MaxHeight)
for h := 0; h < MaxHeight; h++ {
cache[h] = make(map[uint64]common.Hash)
}
// cache all branches that will be used later.
if w.NextMessageNonce != 0 {
index := w.NextMessageNonce
for h := 0; h <= w.height; h++ {
if index%2 == 1 {
// right child, `w.branches[h]` is the corresponding left child
// the index of left child should be `index ^ 1`.
cache[h][index^1] = w.branches[h]
}
index >>= 1
}
}
// cache all new leaves
for i := 0; i < length; i++ {
cache[0][w.NextMessageNonce+uint64(i)] = hashes[i]
}
// build withdraw trie with new hashes
minIndex := w.NextMessageNonce
maxIndex := w.NextMessageNonce + uint64(length) - 1
for h := 0; maxIndex > 0; h++ {
if minIndex%2 == 1 {
minIndex--
}
if maxIndex%2 == 0 {
cache[h][maxIndex^1] = w.zeroes[h]
}
for i := minIndex; i <= maxIndex; i += 2 {
cache[h+1][i>>1] = utils.Keccak2(cache[h][i], cache[h][i^1])
}
minIndex >>= 1
maxIndex >>= 1
}
// update branches using hashes one by one
for i := 0; i < length; i++ {
proof := UpdateBranchWithNewMessage(w.zeroes, w.branches, w.NextMessageNonce, hashes[i])
w.NextMessageNonce++
w.height = len(proof)
}
proofs := make([][]byte, length)
// retrieve merkle proof from cache
for i := 0; i < length; i++ {
index := w.NextMessageNonce + uint64(i) - uint64(length)
var merkleProof []common.Hash
for h := 0; h < w.height; h++ {
merkleProof = append(merkleProof, cache[h][index^1])
index >>= 1
}
proofs[i] = EncodeMerkleProofToBytes(merkleProof)
}
return proofs
}
// MessageRoot return the current root hash of withdraw trie.
func (w *WithdrawTrie) MessageRoot() common.Hash {
if w.height == -1 {
return common.Hash{}
}
return w.branches[w.height]
}
// DecodeBytesToMerkleProof converts a byte array to a bytes32 array. The caller should make sure the length is a multiple of 32.
func DecodeBytesToMerkleProof(proofBytes []byte) []common.Hash {
proof := make([]common.Hash, len(proofBytes)/32)
for i := 0; i < len(proofBytes); i += 32 {
proof[i/32] = common.BytesToHash(proofBytes[i : i+32])
}
return proof
}
// EncodeMerkleProofToBytes converts a bytes32 array to a byte array by concatenation.
func EncodeMerkleProofToBytes(proof []common.Hash) []byte {
var proofBytes []byte
for i := 0; i < len(proof); i++ {
proofBytes = append(proofBytes, proof[i][:]...)
}
return proofBytes
}
// UpdateBranchWithNewMessage updates the branches with a new message and returns the merkle proof for the message.
func UpdateBranchWithNewMessage(zeroes []common.Hash, branches []common.Hash, index uint64, msgHash common.Hash) []common.Hash {
root := msgHash
var merkleProof []common.Hash
var height uint64
for height = 0; index > 0; height++ {
if index%2 == 0 {
// it may be used in next round.
branches[height] = root
merkleProof = append(merkleProof, zeroes[height])
// it's a left child, the right child must be null
root = utils.Keccak2(root, zeroes[height])
} else {
// it's a right child, use previously computed hash
root = utils.Keccak2(branches[height], root)
merkleProof = append(merkleProof, branches[height])
}
index >>= 1
}
branches[height] = root
return merkleProof
}
// RecoverBranchFromProof will recover the latest branches from a merkle proof and message hash
func RecoverBranchFromProof(proof []common.Hash, index uint64, msgHash common.Hash) []common.Hash {
branches := make([]common.Hash, 64)
root := msgHash
var height uint64
for height = 0; index > 0; height++ {
if index%2 == 0 {
branches[height] = root
// it's a left child, the right child must be null
root = utils.Keccak2(root, proof[height])
} else {
// it's a right child, use previously computed hash
branches[height] = proof[height]
root = utils.Keccak2(proof[height], root)
}
index >>= 1
}
branches[height] = root
for height++; height < 64; height++ {
branches[height] = common.Hash{}
}
return branches
}
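
The trie above is append-only: each call to AppendMessages adds leaves at the rightmost position and returns one encoded Merkle proof per new leaf, while MessageRoot tracks the rolling root. A minimal usage sketch follows, assuming the module paths from this diff (bridge-history-api/cross_msg and bridge-history-api/utils) and that utils.Keccak2 returns keccak256 of its two inputs concatenated:

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/common"

	"bridge-history-api/cross_msg"
	"bridge-history-api/utils"
)

func main() {
	trie := cross_msg.NewWithdrawTrie()

	// Append two message hashes; AppendMessages returns one encoded proof per leaf.
	hashes := []common.Hash{
		common.BigToHash(big.NewInt(1)),
		common.BigToHash(big.NewInt(2)),
	}
	proofs := trie.AppendMessages(hashes)

	// Recompute the root from leaf index 1 and its proof, using the same
	// parity walk as verifyMerkleProof in the tests below: an even index is
	// a left child (sibling hashed on the right), an odd index a right child.
	index := uint64(1)
	root := hashes[1]
	for _, sibling := range cross_msg.DecodeBytesToMerkleProof(proofs[1]) {
		if index%2 == 0 {
			root = utils.Keccak2(root, sibling)
		} else {
			root = utils.Keccak2(sibling, root)
		}
		index >>= 1
	}
	fmt.Println(root == trie.MessageRoot()) // expected: true
}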


@@ -0,0 +1,213 @@
package cross_msg_test
import (
"math/big"
"testing"
"github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/assert"
"bridge-history-api/cross_msg"
"bridge-history-api/utils"
)
func TestUpdateBranchWithNewMessage(t *testing.T) {
zeroes := make([]common.Hash, 64)
branches := make([]common.Hash, 64)
zeroes[0] = common.Hash{}
for i := 1; i < 64; i++ {
zeroes[i] = utils.Keccak2(zeroes[i-1], zeroes[i-1])
}
cross_msg.UpdateBranchWithNewMessage(zeroes, branches, 0, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000001"))
if branches[0] != common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000001") {
t.Fatalf("Invalid root, want %s, got %s", "0x0000000000000000000000000000000000000000000000000000000000000001", branches[0].Hex())
}
cross_msg.UpdateBranchWithNewMessage(zeroes, branches, 1, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000002"))
if branches[1] != common.HexToHash("0xe90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0") {
t.Fatalf("Invalid root, want %s, got %s", "0xe90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0", branches[1].Hex())
}
cross_msg.UpdateBranchWithNewMessage(zeroes, branches, 2, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000003"))
if branches[2] != common.HexToHash("0x222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c") {
t.Fatalf("Invalid root, want %s, got %s", "0x222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c", branches[2].Hex())
}
cross_msg.UpdateBranchWithNewMessage(zeroes, branches, 3, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000004"))
if branches[2] != common.HexToHash("0xa9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36") {
t.Fatalf("Invalid root, want %s, got %s", "0xa9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36", branches[2].Hex())
}
}
func TestDecodeEncodeMerkleProof(t *testing.T) {
proof := cross_msg.DecodeBytesToMerkleProof(common.Hex2Bytes("2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d49012ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d49022ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d49032ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4904"))
if len(proof) != 4 {
t.Fatalf("proof length mismatch, want %d, got %d", 4, len(proof))
}
if proof[0] != common.HexToHash("0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4901") {
t.Fatalf("proof[0] mismatch, want %s, got %s", "0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4901", proof[0].Hex())
}
if proof[1] != common.HexToHash("0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4902") {
t.Fatalf("proof[1] mismatch, want %s, got %s", "0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4902", proof[0].Hex())
}
if proof[2] != common.HexToHash("0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4903") {
t.Fatalf("proof[2] mismatch, want %s, got %s", "0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4903", proof[0].Hex())
}
if proof[3] != common.HexToHash("0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4904") {
t.Fatalf("proof[3] mismatch, want %s, got %s", "0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4904", proof[0].Hex())
}
bytes := cross_msg.EncodeMerkleProofToBytes(proof)
if common.Bytes2Hex(bytes) != "2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d49012ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d49022ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d49032ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4904" {
t.Fatalf("wrong encoded bytes")
}
}
func TestRecoverBranchFromProof(t *testing.T) {
zeroes := make([]common.Hash, 64)
branches := make([]common.Hash, 64)
zeroes[0] = common.Hash{}
for i := 1; i < 64; i++ {
zeroes[i] = utils.Keccak2(zeroes[i-1], zeroes[i-1])
}
proof := cross_msg.UpdateBranchWithNewMessage(zeroes, branches, 0, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000001"))
tmpBranches := cross_msg.RecoverBranchFromProof(proof, 0, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000001"))
for i := 0; i < 64; i++ {
if tmpBranches[i] != branches[i] {
t.Fatalf("Invalid branch, want %s, got %s", branches[i].Hex(), tmpBranches[i].Hex())
}
}
proof = cross_msg.UpdateBranchWithNewMessage(zeroes, branches, 1, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000002"))
tmpBranches = cross_msg.RecoverBranchFromProof(proof, 1, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000002"))
for i := 0; i < 64; i++ {
if tmpBranches[i] != branches[i] {
t.Fatalf("Invalid branch, want %s, got %s", branches[i].Hex(), tmpBranches[i].Hex())
}
}
proof = cross_msg.UpdateBranchWithNewMessage(zeroes, branches, 2, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000003"))
tmpBranches = cross_msg.RecoverBranchFromProof(proof, 2, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000003"))
for i := 0; i < 64; i++ {
if tmpBranches[i] != branches[i] {
t.Fatalf("Invalid branch, want %s, got %s", branches[i].Hex(), tmpBranches[i].Hex())
}
}
proof = cross_msg.UpdateBranchWithNewMessage(zeroes, branches, 3, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000004"))
tmpBranches = cross_msg.RecoverBranchFromProof(proof, 3, common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000004"))
for i := 0; i < 64; i++ {
if tmpBranches[i] != branches[i] {
t.Fatalf("Invalid branch, want %s, got %s", branches[i].Hex(), tmpBranches[i].Hex())
}
}
}
func TestWithdrawTrieOneByOne(t *testing.T) {
for initial := 0; initial < 128; initial++ {
withdrawTrie := cross_msg.NewWithdrawTrie()
var hashes []common.Hash
for i := 0; i < initial; i++ {
hash := common.BigToHash(big.NewInt(int64(i + 1)))
hashes = append(hashes, hash)
withdrawTrie.AppendMessages([]common.Hash{
hash,
})
}
for i := initial; i < 128; i++ {
hash := common.BigToHash(big.NewInt(int64(i + 1)))
hashes = append(hashes, hash)
expectedRoot := computeMerkleRoot(hashes)
proofBytes := withdrawTrie.AppendMessages([]common.Hash{
hash,
})
assert.Equal(t, withdrawTrie.NextMessageNonce, uint64(i+1))
assert.Equal(t, expectedRoot.String(), withdrawTrie.MessageRoot().String())
proof := cross_msg.DecodeBytesToMerkleProof(proofBytes[0])
verifiedRoot := verifyMerkleProof(uint64(i), hash, proof)
assert.Equal(t, expectedRoot.String(), verifiedRoot.String())
}
}
}
func TestWithdrawTrieMultiple(t *testing.T) {
var expectedRoots []common.Hash
{
var hashes []common.Hash
for i := 0; i < 128; i++ {
hash := common.BigToHash(big.NewInt(int64(i + 1)))
hashes = append(hashes, hash)
expectedRoots = append(expectedRoots, computeMerkleRoot(hashes))
}
}
for initial := 0; initial < 100; initial++ {
var hashes []common.Hash
for i := 0; i < initial; i++ {
hash := common.BigToHash(big.NewInt(int64(i + 1)))
hashes = append(hashes, hash)
}
for finish := initial; finish < 100; finish++ {
withdrawTrie := cross_msg.NewWithdrawTrie()
withdrawTrie.AppendMessages(hashes)
var newHashes []common.Hash
for i := initial; i <= finish; i++ {
hash := common.BigToHash(big.NewInt(int64(i + 1)))
newHashes = append(newHashes, hash)
}
proofBytes := withdrawTrie.AppendMessages(newHashes)
assert.Equal(t, withdrawTrie.NextMessageNonce, uint64(finish+1))
assert.Equal(t, expectedRoots[finish].String(), withdrawTrie.MessageRoot().String())
for i := initial; i <= finish; i++ {
hash := common.BigToHash(big.NewInt(int64(i + 1)))
proof := cross_msg.DecodeBytesToMerkleProof(proofBytes[i-initial])
verifiedRoot := verifyMerkleProof(uint64(i), hash, proof)
assert.Equal(t, expectedRoots[finish].String(), verifiedRoot.String())
}
}
}
}
func verifyMerkleProof(index uint64, leaf common.Hash, proof []common.Hash) common.Hash {
root := leaf
for _, h := range proof {
if index%2 == 0 {
root = utils.Keccak2(root, h)
} else {
root = utils.Keccak2(h, root)
}
index >>= 1
}
return root
}
func computeMerkleRoot(hashes []common.Hash) common.Hash {
if len(hashes) == 0 {
return common.Hash{}
}
zeroHash := common.Hash{}
for {
if len(hashes) == 1 {
break
}
var newHashes []common.Hash
for i := 0; i < len(hashes); i += 2 {
if i+1 < len(hashes) {
newHashes = append(newHashes, utils.Keccak2(hashes[i], hashes[i+1]))
} else {
newHashes = append(newHashes, utils.Keccak2(hashes[i], zeroHash))
}
}
hashes = newHashes
zeroHash = utils.Keccak2(zeroHash, zeroHash)
}
return hashes[0]
}


@@ -2,23 +2,24 @@
-- +goose StatementBegin
create table cross_message
(
id BIGSERIAL PRIMARY KEY,
msg_hash VARCHAR NOT NULL DEFAULT '',
height BIGINT NOT NULL,
sender VARCHAR NOT NULL,
target VARCHAR NOT NULL,
amount VARCHAR NOT NULL,
layer1_hash VARCHAR NOT NULL DEFAULT '',
layer2_hash VARCHAR NOT NULL DEFAULT '',
layer1_token VARCHAR NOT NULL DEFAULT '',
layer2_token VARCHAR NOT NULL DEFAULT '',
token_id BIGINT NOT NULL DEFAULT 0,
asset SMALLINT NOT NULL,
msg_type SMALLINT NOT NULL,
is_deleted BOOLEAN NOT NULL DEFAULT FALSE,
created_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMP(0) DEFAULT NULL
id BIGSERIAL PRIMARY KEY,
msg_hash VARCHAR NOT NULL DEFAULT '',
height BIGINT NOT NULL,
sender VARCHAR NOT NULL,
target VARCHAR NOT NULL,
amount VARCHAR NOT NULL,
layer1_hash VARCHAR NOT NULL DEFAULT '',
layer2_hash VARCHAR NOT NULL DEFAULT '',
layer1_token VARCHAR NOT NULL DEFAULT '',
layer2_token VARCHAR NOT NULL DEFAULT '',
token_id BIGINT NOT NULL DEFAULT 0,
asset SMALLINT NOT NULL,
msg_type SMALLINT NOT NULL,
is_deleted BOOLEAN NOT NULL DEFAULT FALSE,
block_timestamp TIMESTAMP(0) DEFAULT NULL,
created_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMP(0) DEFAULT NULL
);
comment
@@ -48,20 +49,20 @@ CREATE TRIGGER update_timestamp BEFORE UPDATE
ON cross_message FOR EACH ROW EXECUTE PROCEDURE
update_timestamp();
CREATE OR REPLACE FUNCTION delete_at_trigger()
CREATE OR REPLACE FUNCTION deleted_at_trigger()
RETURNS TRIGGER AS $$
BEGIN
IF NEW.is_deleted AND OLD.is_deleted != NEW.is_deleted THEN
UPDATE cross_message SET delete_at = NOW() WHERE id = NEW.id;
UPDATE cross_message SET deleted_at = NOW() WHERE id = NEW.id;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER delete_at_trigger
CREATE TRIGGER deleted_at_trigger
AFTER UPDATE ON cross_message
FOR EACH ROW
EXECUTE FUNCTION delete_at_trigger();
EXECUTE FUNCTION deleted_at_trigger();
-- +goose StatementEnd


@@ -31,20 +31,20 @@ CREATE TRIGGER update_timestamp BEFORE UPDATE
ON relayed_msg FOR EACH ROW EXECUTE PROCEDURE
update_timestamp();
CREATE OR REPLACE FUNCTION delete_at_trigger()
CREATE OR REPLACE FUNCTION deleted_at_trigger()
RETURNS TRIGGER AS $$
BEGIN
IF NEW.is_deleted AND OLD.is_deleted != NEW.is_deleted THEN
UPDATE relayed_msg SET delete_at = NOW() WHERE id = NEW.id;
UPDATE relayed_msg SET deleted_at = NOW() WHERE id = NEW.id;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER delete_at_trigger
CREATE TRIGGER deleted_at_trigger
AFTER UPDATE ON relayed_msg
FOR EACH ROW
EXECUTE FUNCTION delete_at_trigger();
EXECUTE FUNCTION deleted_at_trigger();
-- +goose StatementEnd


@@ -54,6 +54,7 @@ type CrossMsg struct {
Asset int `json:"asset" db:"asset"`
MsgType int `json:"msg_type" db:"msg_type"`
IsDeleted bool `json:"is_deleted" db:"is_deleted"`
Timestamp *time.Time `json:"timestamp" db:"block_timestamp"`
CreatedAt *time.Time `json:"created_at" db:"created_at"`
UpdatedAt *time.Time `json:"updated_at" db:"updated_at"`
DeletedAt *time.Time `json:"deleted_at" db:"deleted_at"`
@@ -76,6 +77,8 @@ type L1CrossMsgOrm interface {
UpdateL1CrossMsgHash(ctx context.Context, l1Hash, msgHash common.Hash) error
GetLatestL1ProcessedHeight() (int64, error)
DeleteL1CrossMsgAfterHeightDBTx(dbTx *sqlx.Tx, height int64) error
UpdateL1Blocktimestamp(height uint64, timestamp time.Time) error
GetL1EarliestNoBlocktimestampHeight() (uint64, error)
}
// L2CrossMsgOrm provides operations on l2_cross_message table
@@ -88,6 +91,8 @@ type L2CrossMsgOrm interface {
UpdateL2CrossMsgHash(ctx context.Context, l2Hash, msgHash common.Hash) error
GetLatestL2ProcessedHeight() (int64, error)
DeleteL2CrossMsgFromHeightDBTx(dbTx *sqlx.Tx, height int64) error
UpdateL2Blocktimestamp(height uint64, timestamp time.Time) error
GetL2EarliestNoBlocktimestampHeight() (uint64, error)
}
type RelayedMsgOrm interface {


@@ -4,6 +4,7 @@ import (
"context"
"database/sql"
"errors"
"time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/log"
@@ -118,3 +119,22 @@ func (l *l1CrossMsgOrm) DeleteL1CrossMsgAfterHeightDBTx(dbTx *sqlx.Tx, height in
}
return nil
}
func (l *l1CrossMsgOrm) UpdateL1Blocktimestamp(height uint64, timestamp time.Time) error {
if _, err := l.db.Exec(`UPDATE cross_message SET block_timestamp = $1 where height = $2 AND msg_type = $3 AND NOT is_deleted`, timestamp, height, Layer1Msg); err != nil {
return err
}
return nil
}
func (l *l1CrossMsgOrm) GetL1EarliestNoBlocktimestampHeight() (uint64, error) {
row := l.db.QueryRowx(`SELECT height FROM cross_message WHERE block_timestamp IS NULL AND msg_type = $1 AND NOT is_deleted ORDER BY height ASC LIMIT 1;`, Layer1Msg)
var result uint64
if err := row.Scan(&result); err != nil {
if err == sql.ErrNoRows {
return 0, nil
}
return 0, err
}
return result, nil
}


@@ -4,6 +4,7 @@ import (
"context"
"database/sql"
"errors"
"time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/log"
@@ -21,7 +22,7 @@ func NewL2CrossMsgOrm(db *sqlx.DB) L2CrossMsgOrm {
func (l *l2CrossMsgOrm) GetL2CrossMsgByHash(l2Hash common.Hash) (*CrossMsg, error) {
result := &CrossMsg{}
row := l.db.QueryRowx(`SELECT * FROM l2_cross_message WHERE layer2_hash = $1 AND NOT is_deleted;`, l2Hash.String())
row := l.db.QueryRowx(`SELECT * FROM cross_message WHERE layer2_hash = $1 AND NOT is_deleted;`, l2Hash.String())
if err := row.StructScan(result); err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
@@ -121,3 +122,22 @@ func (l *l2CrossMsgOrm) GetLatestL2ProcessedHeight() (int64, error) {
}
return 0, nil
}
func (l *l2CrossMsgOrm) UpdateL2Blocktimestamp(height uint64, timestamp time.Time) error {
if _, err := l.db.Exec(`UPDATE cross_message SET block_timestamp = $1 where height = $2 AND msg_type = $3 AND NOT is_deleted`, timestamp, height, Layer2Msg); err != nil {
return err
}
return nil
}
func (l *l2CrossMsgOrm) GetL2EarliestNoBlocktimestampHeight() (uint64, error) {
row := l.db.QueryRowx(`SELECT height FROM cross_message WHERE block_timestamp IS NULL AND msg_type = $1 AND NOT is_deleted ORDER BY height ASC LIMIT 1;`, Layer2Msg)
var result uint64
if err := row.Scan(&result); err != nil {
if err == sql.ErrNoRows {
return 0, nil
}
return 0, err
}
return result, nil
}


@@ -16,6 +16,7 @@ type OrmFactory interface {
orm.L1CrossMsgOrm
orm.L2CrossMsgOrm
orm.RelayedMsgOrm
GetTotalCrossMsgCountByAddress(sender string) (uint64, error)
GetCrossMsgsByAddressWithOffset(sender string, offset int64, limit int64) ([]*orm.CrossMsg, error)
GetDB() *sqlx.DB
Beginx() (*sqlx.Tx, error)
@@ -59,10 +60,19 @@ func (o *ormFactory) Beginx() (*sqlx.Tx, error) {
return o.DB.Beginx()
}
func (o *ormFactory) GetTotalCrossMsgCountByAddress(sender string) (uint64, error) {
var count uint64
row := o.DB.QueryRowx(`SELECT COUNT(*) FROM cross_message WHERE sender = $1 AND NOT is_deleted;`, sender)
if err := row.Scan(&count); err != nil {
return 0, err
}
return count, nil
}
func (o *ormFactory) GetCrossMsgsByAddressWithOffset(sender string, offset int64, limit int64) ([]*orm.CrossMsg, error) {
para := sender
var results []*orm.CrossMsg
rows, err := o.DB.Queryx(`SELECT * FROM cross_message WHERE sender = $1 AND NOT is_deleted ORDER BY id DESC LIMIT $2 OFFSET $3;`, para, limit, offset)
rows, err := o.DB.Queryx(`SELECT * FROM cross_message WHERE sender = $1 AND NOT is_deleted ORDER BY block_timestamp DESC NULLS FIRST, id DESC LIMIT $2 OFFSET $3;`, para, limit, offset)
if err != nil || rows == nil {
return nil, err
}


@@ -1,9 +1,10 @@
module bridge-history-api
go 1.20
go 1.19
require (
github.com/ethereum/go-ethereum v1.11.6
github.com/ethereum/go-ethereum v1.12.0
github.com/iris-contrib/middleware/cors v0.0.0-20230531125531-980d3a09a458
github.com/jmoiron/sqlx v1.3.5
github.com/kataras/iris/v12 v12.2.0
github.com/lib/pq v1.10.7
@@ -26,6 +27,7 @@ require (
github.com/andybalholm/brotli v1.0.5 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.7.0 // indirect
github.com/blang/semver/v4 v4.0.0 // indirect
github.com/btcsuite/btcd v0.20.1-beta // indirect
github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect
@@ -34,14 +36,17 @@ require (
github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect
github.com/cockroachdb/pebble v0.0.0-20230209160836-829675f94811 // indirect
github.com/cockroachdb/redact v1.1.3 // indirect
github.com/consensys/bavard v0.1.13 // indirect
github.com/consensys/gnark-crypto v0.10.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
github.com/crate-crypto/go-kzg-4844 v0.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/deckarep/golang-set/v2 v2.1.0 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 // indirect
github.com/deepmap/oapi-codegen v1.8.2 // indirect
github.com/docker/docker v20.10.21+incompatible // indirect
github.com/edsrzf/mmap-go v1.0.0 // indirect
github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385 // indirect
github.com/ethereum/c-kzg-4844 v0.2.0 // indirect
github.com/fatih/structs v1.1.0 // indirect
github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5 // indirect
github.com/flosch/pongo2/v4 v4.0.2 // indirect
@@ -92,6 +97,7 @@ require (
github.com/microcosm-cc/bluemonday v1.0.23 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/pointerstructure v1.2.0 // indirect
github.com/mmcloughlin/addchain v0.4.0 // indirect
github.com/nats-io/nats.go v1.23.0 // indirect
github.com/nats-io/nkeys v0.3.0 // indirect
github.com/nats-io/nuid v1.0.1 // indirect
@@ -115,6 +121,7 @@ require (
github.com/shirou/gopsutil v3.21.11+incompatible // indirect
github.com/sirupsen/logrus v1.9.0 // indirect
github.com/status-im/keycard-go v0.2.0 // indirect
github.com/supranational/blst v0.3.11-0.20230406105308-e9dfc5ee724b // indirect
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect
github.com/tdewolff/minify/v2 v2.12.4 // indirect
github.com/tdewolff/parse/v2 v2.6.4 // indirect
@@ -142,5 +149,6 @@ require (
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
rsc.io/tmplfunc v0.0.3 // indirect
)
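
For context on the new indirect entries (bits-and-blooms/bitset, consensys/bavard, consensys/gnark-crypto, crate-crypto/go-kzg-4844, ethereum/c-kzg-4844, mmcloughlin/addchain, supranational/blst, rsc.io/tmplfunc): these appear to ride in with the go-ethereum v1.11.6 to v1.12.0 bump, which added BLS and KZG (EIP-4844) cryptography dependencies. bridge-history-api does not import them directly, hence the // indirect markers and the matching go.sum additions below.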

View File

@@ -32,6 +32,8 @@ github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd3
github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bits-and-blooms/bitset v1.7.0 h1:YjAGVd3XmtK9ktAbX8Zg2g2PwLIMjGREZJHlV4j7NEo=
github.com/bits-and-blooms/bitset v1.7.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
github.com/btcsuite/btcd v0.20.1-beta h1:Ik4hyJqN8Jfyv3S4AGBOmyouMsYE3EdYODkMbQjwPGw=
@@ -65,12 +67,18 @@ github.com/cockroachdb/pebble v0.0.0-20230209160836-829675f94811/go.mod h1:Nb5lg
github.com/cockroachdb/redact v1.1.3 h1:AKZds10rFSIj7qADf0g46UixK8NNLwWTNdCIGS5wfSQ=
github.com/cockroachdb/redact v1.1.3/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg=
github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM=
github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ=
github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI=
github.com/consensys/gnark-crypto v0.10.0 h1:zRh22SR7o4K35SoNqouS9J/TKHTyU2QWaj5ldehyXtA=
github.com/consensys/gnark-crypto v0.10.0/go.mod h1:Iq/P3HHl0ElSjsg2E1gsMwhAyxnxoKK5nVyZKd+/KhU=
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/crate-crypto/go-kzg-4844 v0.2.0 h1:UVuHOE+5tIWrim4zf/Xaa43+MIsDCPyW76QhUpiMGj4=
github.com/crate-crypto/go-kzg-4844 v0.2.0/go.mod h1:SBP7ikXEgDnUPONgm33HtuDZEDtWa3L4QtN1ocJSEQ4=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4=
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -92,8 +100,6 @@ github.com/djherbis/atime v1.1.0/go.mod h1:28OF6Y8s3NQWwacXc5eZTsEsiMzp7LF8MbXE+
github.com/docker/docker v20.10.21+incompatible h1:UTLdBmHk3bEY+w8qeO5KttOhy6OmXWsl/FEet9Uswog=
github.com/docker/docker v20.10.21+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/edsrzf/mmap-go v1.0.0 h1:CEBF7HpRnUCSJgGUb5h1Gm7e3VkmVDrR8lvWVLtrOFw=
github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385 h1:clC1lXBpe2kTj2VHdaIu9ajZQe4kcEY9j0NsnDDBZ3o=
github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -101,8 +107,10 @@ github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.m
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw=
github.com/ethereum/go-ethereum v1.11.6 h1:2VF8Mf7XiSUfmoNOy3D+ocfl9Qu8baQBrCNbo2CXQ8E=
github.com/ethereum/go-ethereum v1.11.6/go.mod h1:+a8pUj1tOyJ2RinsNQD4326YS+leSoKGiG/uVVb0x6Y=
github.com/ethereum/c-kzg-4844 v0.2.0 h1:+cUvymlnoDDQgMInp25Bo3OmLajmmY8mLJ/tLjqd77Q=
github.com/ethereum/c-kzg-4844 v0.2.0/go.mod h1:WI2Nd82DMZAAZI1wV2neKGost9EKjvbpQR9OqE5Qqa8=
github.com/ethereum/go-ethereum v1.12.0 h1:bdnhLPtqETd4m3mS8BGMNvBTf36bO5bx/hxE2zljOa0=
github.com/ethereum/go-ethereum v1.12.0/go.mod h1:/oo2X/dZLJjf2mJ6YT9wcWxa4nNJDBKDBU6sFIpx1Gs=
github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8=
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
@@ -193,6 +201,7 @@ github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -233,6 +242,8 @@ github.com/iris-contrib/go.uuid v2.0.0+incompatible h1:XZubAYg61/JwnJNbZilGjf3b3
github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/+fafWORmlnuysV2EMP8MW+qe0=
github.com/iris-contrib/httpexpect/v2 v2.12.1 h1:3cTZSyBBen/kfjCtgNFoUKi1u0FVXNaAjyRJOo6AVS4=
github.com/iris-contrib/jade v1.1.3/go.mod h1:H/geBymxJhShH5kecoiOCSssPX7QWYH7UaeZTSWddIk=
github.com/iris-contrib/middleware/cors v0.0.0-20230531125531-980d3a09a458 h1:V60rHQJc6DieKV1BqHIGclraPdO4kinuFAZIrPGHN7s=
github.com/iris-contrib/middleware/cors v0.0.0-20230531125531-980d3a09a458/go.mod h1:7eVziAp1yUwFB/ZMg71n84VWQH+7wukvxcHuF2e7cbg=
github.com/iris-contrib/pongo2 v0.0.1/go.mod h1:Ssh+00+3GAZqSQb30AvBRNxBx7rf0GqwkjqxNd0u65g=
github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw=
github.com/iris-contrib/schema v0.0.6 h1:CPSBLyx2e91H2yJzPuhGuifVRnZBBJ3pCOMbOvPZaTw=
@@ -290,6 +301,7 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0
github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg=
github.com/labstack/echo/v4 v4.5.0/go.mod h1:czIriw4a0C1dFun+ObrXp7ok03xON0N1awStJ6ArI7Y=
github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=
github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw=
github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
@@ -341,6 +353,9 @@ github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyua
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/pointerstructure v1.2.0 h1:O+i9nHnXS3l/9Wu7r4NrEdwA2VFTicjUEN1uBnDo34A=
github.com/mitchellh/pointerstructure v1.2.0/go.mod h1:BRAsLI5zgXmw97Lf6s25bs8ohIXc3tViBH44KcwB2g4=
github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY=
github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU=
github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
@@ -453,6 +468,8 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/supranational/blst v0.3.11-0.20230406105308-e9dfc5ee724b h1:u49mjRnygnB34h8OKbnNJFVUtWSKIKb1KukdV8bILUM=
github.com/supranational/blst v0.3.11-0.20230406105308-e9dfc5ee724b/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw=
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY=
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc=
github.com/tdewolff/minify/v2 v2.12.4 h1:kejsHQMM17n6/gwdw53qsi6lg0TGddZADVyQOz1KMdE=
@@ -720,3 +737,5 @@ modernc.org/sqlite v1.18.1 h1:ko32eKt3jf7eqIkCgPAeHMBXw3riNSLhl2f3loEF7o8=
modernc.org/strutil v1.1.2 h1:iFBDH6j1Z0bN/Q9udJnnFoFpENA4252qe/7/5woE5MI=
modernc.org/token v1.0.0 h1:a0jaWiNMDhDUtqOj09wvjWWAqd3q7WpBulmL9H2egsk=
moul.io/http2curl/v2 v2.3.0 h1:9r3JfDzWPcbIklMOs2TnIFzDYvfAZvjeavG6EzP7jYs=
rsc.io/tmplfunc v0.0.3 h1:53XFQh69AfOa8Tw0Jm7t+GV7KZhOi6jzsCzTtKbMvzU=
rsc.io/tmplfunc v0.0.3/go.mod h1:AG3sTPzElb1Io3Yg4voV9AGZJuleGAwaVRxL9M49PhA=

View File

@@ -4,7 +4,7 @@ import "bridge-history-api/service"
type Data struct {
Result []*service.TxHistoryInfo `json:"result"`
Total int `json:"total"`
Total uint64 `json:"total"`
}
type QueryByAddressResponse struct {

View File

@@ -32,7 +32,7 @@ type TxHistoryInfo struct {
// HistoryService example service.
type HistoryService interface {
GetTxsByAddress(address common.Address, offset int64, limit int64) ([]*TxHistoryInfo, error)
GetTxsByAddress(address common.Address, offset int64, limit int64) ([]*TxHistoryInfo, uint64, error)
GetTxsByHashes(hashes []string) ([]*TxHistoryInfo, error)
}
@@ -69,20 +69,25 @@ func updateCrossTxHash(msgHash string, txInfo *TxHistoryInfo, db db.OrmFactory)
}
func (h *historyBackend) GetTxsByAddress(address common.Address, offset int64, limit int64) ([]*TxHistoryInfo, error) {
txHistories := make([]*TxHistoryInfo, 0)
func (h *historyBackend) GetTxsByAddress(address common.Address, offset int64, limit int64) ([]*TxHistoryInfo, uint64, error) {
var txHistories []*TxHistoryInfo
total, err := h.db.GetTotalCrossMsgCountByAddress(address.String())
if err != nil || total == 0 {
return txHistories, 0, err
}
result, err := h.db.GetCrossMsgsByAddressWithOffset(address.String(), offset, limit)
if err != nil {
return nil, err
return nil, 0, err
}
for _, msg := range result {
txHistory := &TxHistoryInfo{
Hash: msg.MsgHash,
Amount: msg.Amount,
To: msg.Target,
IsL1: msg.MsgType == int(orm.Layer1Msg),
BlockNumber: msg.Height,
CreatedAt: msg.CreatedAt,
Hash: msg.Layer1Hash + msg.Layer2Hash,
Amount: msg.Amount,
To: msg.Target,
IsL1: msg.MsgType == int(orm.Layer1Msg),
BlockNumber: msg.Height,
BlockTimestamp: msg.Timestamp,
CreatedAt: msg.CreatedAt,
FinalizeTx: &Finalized{
Hash: "",
},
@@ -90,7 +95,7 @@ func (h *historyBackend) GetTxsByAddress(address common.Address, offset int64, l
updateCrossTxHash(msg.MsgHash, txHistory, h.db)
txHistories = append(txHistories, txHistory)
}
return txHistories, nil
return txHistories, total, nil
}
func (h *historyBackend) GetTxsByHashes(hashes []string) ([]*TxHistoryInfo, error) {
@@ -102,12 +107,13 @@ func (h *historyBackend) GetTxsByHashes(hashes []string) ([]*TxHistoryInfo, erro
}
if l1result != nil {
txHistory := &TxHistoryInfo{
Hash: l1result.Layer1Hash,
Amount: l1result.Amount,
To: l1result.Target,
IsL1: true,
BlockNumber: l1result.Height,
CreatedAt: l1result.CreatedAt,
Hash: l1result.Layer1Hash,
Amount: l1result.Amount,
To: l1result.Target,
IsL1: true,
BlockNumber: l1result.Height,
BlockTimestamp: l1result.Timestamp,
CreatedAt: l1result.CreatedAt,
FinalizeTx: &Finalized{
Hash: "",
},
@@ -122,12 +128,13 @@ func (h *historyBackend) GetTxsByHashes(hashes []string) ([]*TxHistoryInfo, erro
}
if l2result != nil {
txHistory := &TxHistoryInfo{
Hash: l2result.Layer2Hash,
Amount: l2result.Amount,
To: l2result.Target,
IsL1: false,
BlockNumber: l2result.Height,
CreatedAt: l2result.CreatedAt,
Hash: l2result.Layer2Hash,
Amount: l2result.Amount,
To: l2result.Target,
IsL1: false,
BlockNumber: l2result.Height,
BlockTimestamp: l2result.Timestamp,
CreatedAt: l2result.CreatedAt,
FinalizeTx: &Finalized{
Hash: "",
},
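
With GetTxsByAddress now returning the total alongside the page, a caller can fill the widened Total uint64 field from the model diff above in a single call. A minimal sketch, written as if it lived next to the Data model; the function itself and the paging plumbing are assumptions, only the types come from this diff:

import (
	"github.com/ethereum/go-ethereum/common"

	"bridge-history-api/service"
)

// buildQueryByAddressResponse glues the new (txs, total, err) return shape
// into the Data model above; Data.Result and Data.Total follow the diff,
// the function itself is hypothetical.
func buildQueryByAddressResponse(svc service.HistoryService, addr common.Address, offset, limit int64) (*Data, error) {
	txs, total, err := svc.GetTxsByAddress(addr, offset, limit)
	if err != nil {
		return nil, err
	}
	return &Data{Result: txs, Total: total}, nil
}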

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +1,9 @@
package utils
import (
"bytes"
"context"
"errors"
"fmt"
"math/big"
@@ -14,6 +16,11 @@ import (
backendabi "bridge-history-api/abi"
)
// Keccak2 computes the keccak256 hash of the concatenation of two bytes32 values
func Keccak2(a common.Hash, b common.Hash) common.Hash {
return common.BytesToHash(crypto.Keccak256(append(a.Bytes()[:], b.Bytes()[:]...)))
}
func GetSafeBlockNumber(ctx context.Context, client *ethclient.Client, confirmations uint64) (uint64, error) {
number, err := client.BlockNumber(ctx)
if err != nil || number <= confirmations {
@@ -54,3 +61,48 @@ func ComputeMessageHash(
data, _ := backendabi.L2ScrollMessengerABI.Pack("relayMessage", sender, target, value, messageNonce, message)
return common.BytesToHash(crypto.Keccak256(data))
}
// GetBatchRangeFromCalldataV1 finds the batch indices and block ranges from calldata; both block bounds are inclusive.
func GetBatchRangeFromCalldataV1(calldata []byte) ([]uint64, []uint64, []uint64, error) {
var batchIndices []uint64
var startBlocks []uint64
var finishBlocks []uint64
if bytes.Equal(calldata[0:4], common.Hex2Bytes("cb905499")) {
// commitBatches
method := backendabi.ScrollChainABI.Methods["commitBatches"]
values, err := method.Inputs.Unpack(calldata[4:])
if err != nil {
return batchIndices, startBlocks, finishBlocks, err
}
args := make([]backendabi.IScrollChainBatch, len(values))
err = method.Inputs.Copy(&args, values)
if err != nil {
return batchIndices, startBlocks, finishBlocks, err
}
for i := 0; i < len(args); i++ {
batchIndices = append(batchIndices, args[i].BatchIndex)
startBlocks = append(startBlocks, args[i].Blocks[0].BlockNumber)
finishBlocks = append(finishBlocks, args[i].Blocks[len(args[i].Blocks)-1].BlockNumber)
}
} else if bytes.Equal(calldata[0:4], common.Hex2Bytes("8c73235d")) {
// commitBatch
method := backendabi.ScrollChainABI.Methods["commitBatch"]
values, err := method.Inputs.Unpack(calldata[4:])
if err != nil {
return batchIndices, startBlocks, finishBlocks, err
}
args := backendabi.IScrollChainBatch{}
err = method.Inputs.Copy(&args, values)
if err != nil {
return batchIndices, startBlocks, finishBlocks, err
}
batchIndices = append(batchIndices, args.BatchIndex)
startBlocks = append(startBlocks, args.Blocks[0].BlockNumber)
finishBlocks = append(finishBlocks, args.Blocks[len(args.Blocks)-1].BlockNumber)
} else {
return batchIndices, startBlocks, finishBlocks, errors.New("invalid selector")
}
return batchIndices, startBlocks, finishBlocks, nil
}
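
Dispatch here is on the standard Solidity function selector: the first four bytes of keccak256 over the method's canonical signature, so "cb905499" and "8c73235d" must be the selectors of commitBatches and commitBatch in this ABI. Rather than trusting hex literals by eye, they can be derived from the parsed ABI; a sketch using go-ethereum's abi package, where Method.ID is exactly keccak256(signature)[:4]:

import (
	"fmt"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

// selectorOf returns the 4-byte selector of a named method in a parsed ABI.
// With it, bytes.Equal(calldata[:4], sel) could replace the hex literals above.
func selectorOf(contractABI abi.ABI, name string) ([]byte, error) {
	m, ok := contractABI.Methods[name]
	if !ok {
		return nil, fmt.Errorf("method %q not found in ABI", name)
	}
	return m.ID, nil
}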

View File

@@ -0,0 +1,48 @@
package utils_test
import (
"os"
"testing"
"github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/assert"
"bridge-history-api/utils"
)
func TestKeccak2(t *testing.T) {
a := common.HexToHash("0xe90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0")
b := common.HexToHash("0x222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c")
c := utils.Keccak2(a, b)
assert.NotEmpty(t, c)
assert.NotEqual(t, a, c)
assert.NotEqual(t, b, c)
assert.Equal(t, "0xc0ffbd7f501bd3d49721b0724b2bff657cb2378f15d5a9b97cd7ea5bf630d512", c.Hex())
}
func TestGetBatchRangeFromCalldataV1(t *testing.T) {
calldata, err := os.ReadFile("../testdata/commit-batches-0x3095e91db7ba4a6fbf4654d607db322e58ff5579c502219c8024acaea74cf311.txt")
assert.NoError(t, err)
// multiple batches
batchIndices, startBlocks, finishBlocks, err := utils.GetBatchRangeFromCalldataV1(common.Hex2Bytes(string(calldata[:])))
assert.NoError(t, err)
assert.Equal(t, len(batchIndices), 5)
assert.Equal(t, len(startBlocks), 5)
assert.Equal(t, len(finishBlocks), 5)
assert.Equal(t, batchIndices[0], uint64(1))
assert.Equal(t, batchIndices[1], uint64(2))
assert.Equal(t, batchIndices[2], uint64(3))
assert.Equal(t, batchIndices[3], uint64(4))
assert.Equal(t, batchIndices[4], uint64(5))
assert.Equal(t, startBlocks[0], uint64(1))
assert.Equal(t, startBlocks[1], uint64(6))
assert.Equal(t, startBlocks[2], uint64(7))
assert.Equal(t, startBlocks[3], uint64(19))
assert.Equal(t, startBlocks[4], uint64(20))
assert.Equal(t, finishBlocks[0], uint64(5))
assert.Equal(t, finishBlocks[1], uint64(6))
assert.Equal(t, finishBlocks[2], uint64(18))
assert.Equal(t, finishBlocks[3], uint64(19))
assert.Equal(t, finishBlocks[4], uint64(20))
}
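
Keccak2 as exercised above is the node combiner of a binary Merkle tree: hash the concatenation of the two child hashes. A generic, illustrative root computation using it; this assumes a nonzero power-of-two leaf count and says nothing about the tree layout the bridge actually uses:

import (
	"github.com/ethereum/go-ethereum/common"

	"bridge-history-api/utils"
)

// merkleRoot folds leaves into a single root by pairwise Keccak2 hashing.
// Illustration only; len(leaves) must be a nonzero power of two.
func merkleRoot(leaves []common.Hash) common.Hash {
	for len(leaves) > 1 {
		next := make([]common.Hash, 0, len(leaves)/2)
		for i := 0; i < len(leaves); i += 2 {
			next = append(next, utils.Keccak2(leaves[i], leaves[i+1]))
		}
		leaves = next
	}
	return leaves[0]
}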

View File

@@ -80,6 +80,7 @@
"batch_commit_time_sec": 1200,
"batch_blocks_limit": 100,
"commit_tx_calldata_size_limit": 200000,
"commit_tx_batch_count_limit": 30,
"public_input_config": {
"max_tx_num": 44,
"padding_tx_hash": "0x0000000000000000000000000000000000000000000000000000000000000000"

View File

@@ -1,6 +1,6 @@
module scroll-tech/bridge
go 1.18
go 1.19
require (
github.com/agiledragon/gomonkey/v2 v2.9.0

View File

@@ -44,6 +44,8 @@ type BatchProposerConfig struct {
CommitTxCalldataSizeLimit uint64 `json:"commit_tx_calldata_size_limit"`
// Commit tx calldata min size limit in bytes
CommitTxCalldataMinSize uint64 `json:"commit_tx_calldata_min_size,omitempty"`
// Max number of batches in a commit transaction
CommitTxBatchCountLimit uint64 `json:"commit_tx_batch_count_limit"`
// The public input hash config
PublicInputConfig *types.PublicInputHashConfig `json:"public_input_config"`
}

View File

@@ -46,6 +46,7 @@ type BatchProposer struct {
commitCalldataSizeLimit uint64
batchDataBufferSizeLimit uint64
commitCalldataMinSize uint64
commitBatchCountLimit int
proofGenerationFreq uint64
batchDataBuffer []*bridgeTypes.BatchData
@@ -72,6 +73,7 @@ func NewBatchProposer(ctx context.Context, cfg *config.BatchProposerConfig, rela
batchCommitTimeSec: cfg.BatchCommitTimeSec,
commitCalldataSizeLimit: cfg.CommitTxCalldataSizeLimit,
commitCalldataMinSize: cfg.CommitTxCalldataMinSize,
commitBatchCountLimit: int(cfg.CommitTxBatchCountLimit),
batchDataBufferSizeLimit: 100*cfg.CommitTxCalldataSizeLimit + 1*1024*1024, // @todo: determine the value.
proofGenerationFreq: cfg.ProofGenerationFreq,
piCfg: cfg.PublicInputConfig,
@@ -202,7 +204,7 @@ func (p *BatchProposer) TryCommitBatches() {
index := 0
commit := false
calldataByteLen := uint64(0)
for ; index < len(p.batchDataBuffer); index++ {
for ; index < len(p.batchDataBuffer) && index < p.commitBatchCountLimit; index++ {
calldataByteLen += bridgeAbi.GetBatchCalldataLength(&p.batchDataBuffer[index].Batch)
if calldataByteLen > p.commitCalldataSizeLimit {
commit = true
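
The loop now stops at whichever cap is hit first: cumulative calldata size or batch count. Extracted as a standalone sketch (types assumed; whether the size-crossing batch itself still gets committed depends on code outside this hunk):

// batchesToCommit counts how many leading batches fit under both a calldata
// size cap and a count cap, mirroring the loop condition above.
func batchesToCommit(calldataSizes []uint64, sizeLimit uint64, countLimit int) (n int, sizeCapHit bool) {
	var total uint64
	for ; n < len(calldataSizes) && n < countLimit; n++ {
		total += calldataSizes[n]
		if total > sizeLimit {
			return n, true
		}
	}
	return n, false
}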

View File

@@ -113,11 +113,12 @@ func testBatchProposerBatchGeneration(t *testing.T) {
assert.NoError(t, err)
proposer := NewBatchProposer(context.Background(), &config.BatchProposerConfig{
ProofGenerationFreq: 1,
BatchGasThreshold: 3000000,
BatchTxNumThreshold: 135,
BatchTimeSec: 1,
BatchBlocksLimit: 100,
ProofGenerationFreq: 1,
BatchGasThreshold: 3000000,
BatchTxNumThreshold: 135,
BatchTimeSec: 1,
BatchBlocksLimit: 100,
CommitTxBatchCountLimit: 30,
}, relayer, db)
proposer.TryProposeBatch()
@@ -189,11 +190,12 @@ func testBatchProposerGracefulRestart(t *testing.T) {
assert.Equal(t, batchData2.Hash().Hex(), batchHashes[0])
// test p.recoverBatchDataBuffer().
_ = NewBatchProposer(context.Background(), &config.BatchProposerConfig{
ProofGenerationFreq: 1,
BatchGasThreshold: 3000000,
BatchTxNumThreshold: 135,
BatchTimeSec: 1,
BatchBlocksLimit: 100,
ProofGenerationFreq: 1,
BatchGasThreshold: 3000000,
BatchTxNumThreshold: 135,
BatchTimeSec: 1,
BatchBlocksLimit: 100,
CommitTxBatchCountLimit: 30,
}, relayer, db)
batchHashes, err = blockBatchOrm.GetBlockBatchesHashByRollupStatus(types.RollupPending, math.MaxInt32)

View File

@@ -236,6 +236,7 @@ func prepareAuth(t *testing.T, l2Cli *ethclient.Client, privateKey *ecdsa.Privat
assert.NoError(t, err)
auth.GasPrice, err = l2Cli.SuggestGasPrice(context.Background())
assert.NoError(t, err)
auth.GasLimit = 500000
return auth
}

View File

@@ -1,45 +1,46 @@
GO_VERSION := 1.18
PYTHON_VERSION := 3.10
RUST_VERSION := nightly-2022-12-10
ifeq ($(GO_VERSION),)
GO_VERSION=1.19
endif
ifeq ($(RUST_VERSION),)
RUST_VERSION=nightly-2022-12-10
endif
ifeq ($(PYTHON_VERSION),)
PYTHON_VERSION=3.10
endif
ifeq ($(CUDA_VERSION),)
CUDA_VERSION=11.7.1
endif
.PHONY: all go-alpine-builder rust-builder rust-alpine-builder go-rust-alpine-builder go-rust-builder py-runner
.PHONY: all go-alpine-builder rust-builder rust-alpine-builder go-rust-alpine-builder go-rust-builder cuda-go-rust-builder py-runner
cuda-go-rust-builder:
docker build -t scrolltech/cuda-go-rust-builder:cuda-$(CUDA_VERSION)-go-$(GO_VERSION)-rust-$(RUST_VERSION) -f cuda-go-rust-builder.Dockerfile ./ --build-arg CUDA_VERSION=$(CUDA_VERSION) --build-arg GO_VERSION=$(GO_VERSION) --build-arg RUST_VERSION=$(RUST_VERSION)
go-rust-builder:
docker build -t scrolltech/go-rust-builder:latest -f go-rust-builder.Dockerfile ./
docker image tag scrolltech/go-rust-builder:latest scrolltech/go-rust-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION)
docker build -t scrolltech/go-rust-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION) -f go-rust-builder.Dockerfile ./ --build-arg GO_VERSION=$(GO_VERSION) --build-arg RUST_VERSION=$(RUST_VERSION)
go-alpine-builder:
docker build -t scrolltech/go-alpine-builder:latest -f go-alpine-builder.Dockerfile ./
docker image tag scrolltech/go-alpine-builder:latest scrolltech/go-alpine-builder:$(GO_VERSION)
docker build -t scrolltech/go-alpine-builder:$(GO_VERSION) -f go-alpine-builder.Dockerfile ./ --build-arg GO_VERSION=$(GO_VERSION)
rust-builder:
docker build -t scrolltech/rust-builder:latest -f rust-builder.Dockerfile ./
docker image tag scrolltech/rust-builder:latest scrolltech/rust-builder:$(RUST_VERSION)
docker build -t scrolltech/rust-builder:$(RUST_VERSION) -f rust-builder.Dockerfile ./ --build-arg RUST_VERSION=$(RUST_VERSION)
rust-alpine-builder:
docker build -t scrolltech/rust-alpine-builder:latest -f rust-alpine-builder.Dockerfile ./
docker image tag scrolltech/rust-alpine-builder:latest scrolltech/rust-alpine-builder:$(RUST_VERSION)
docker build -t scrolltech/rust-alpine-builder:$(RUST_VERSION) -f rust-alpine-builder.Dockerfile ./ --build-arg RUST_VERSION=$(RUST_VERSION)
go-rust-alpine-builder:
docker build -t scrolltech/go-rust-alpine-builder:latest -f go-rust-alpine-builder.Dockerfile ./
docker image tag scrolltech/go-rust-alpine-builder:latest scrolltech/go-rust-alpine-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION)
docker build -t scrolltech/go-rust-alpine-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION) -f go-rust-alpine-builder.Dockerfile ./ --build-arg GO_VERSION=$(GO_VERSION) --build-arg RUST_VERSION=$(RUST_VERSION)
py-runner:
docker build -t scrolltech/py-runner:latest -f py-runner.Dockerfile ./
docker image tag scrolltech/py-runner:latest scrolltech/py-runner:$(PYTHON_VERSION)
docker build -t scrolltech/py-runner:$(PYTHON_VERSION) -f py-runner.Dockerfile ./ --build-arg PYTHON_VERSION=$(PYTHON_VERSION)
all: go-alpine-builder rust-builder rust-alpine-builder go-rust-alpine-builder go-rust-builder py-runner
all: go-alpine-builder rust-builder rust-alpine-builder go-rust-alpine-builder go-rust-builder cuda-go-rust-builder py-runner
publish:
docker push scrolltech/go-alpine-builder:latest
docker push scrolltech/go-alpine-builder:$(GO_VERSION)
docker push scrolltech/rust-builder:latest
docker push scrolltech/rust-builder:$(RUST_VERSION)
docker push scrolltech/rust-alpine-builder:latest
docker push scrolltech/rust-alpine-builder:$(RUST_VERSION)
docker push scrolltech/go-rust-alpine-builder:latest
docker push scrolltech/go-rust-alpine-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION)
docker push scrolltech/go-rust-builder:latest
docker push scrolltech/go-rust-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION)
docker push scrolltech/py-runner:latest
docker push scrolltech/cuda-go-rust-builder:cuda-$(CUDA_VERSION)-go-$(GO_VERSION)-rust-$(RUST_VERSION)
docker push scrolltech/py-runner:$(PYTHON_VERSION)
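
The pattern across these targets: instead of building :latest and re-tagging, every image is now built directly with --build-arg so the tag (for example go-$(GO_VERSION)-rust-$(RUST_VERSION)) names the toolchain versions actually baked in, and the ifeq guards turn the hard-coded versions into overridable defaults, e.g. make GO_VERSION=1.20 go-alpine-builder.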

View File

@@ -0,0 +1,35 @@
ARG CUDA_VERSION=11.7.1
ARG GO_VERSION=1.19
ARG RUST_VERSION=nightly-2022-12-10
ARG CARGO_CHEF_TAG=0.1.41
FROM nvidia/cuda:${CUDA_VERSION}-devel-ubuntu22.04
RUN apt-get update
# Install basic packages
RUN apt-get install build-essential curl wget git pkg-config --no-install-recommends -y
# Install dev-packages
RUN apt-get install libclang-dev libssl-dev cmake llvm --no-install-recommends -y
# Install related libs
RUN apt install libprocps-dev libboost-all-dev libmpfr-dev libgmp-dev --no-install-recommends -y
# Clean installed cache
RUN rm -rf /var/lib/apt/lists/*
# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
ENV CARGO_HOME=/root/.cargo
# Add Toolchain
ARG RUST_VERSION
RUN rustup toolchain install ${RUST_VERSION}
ARG CARGO_CHEF_TAG
RUN cargo install cargo-chef --locked --version ${CARGO_CHEF_TAG} \
&& rm -rf $CARGO_HOME/registry/
# Install Go
ARG GO_VERSION
RUN rm -rf /usr/local/go
RUN wget https://go.dev/dl/go${GO_VERSION}.1.linux-amd64.tar.gz
RUN tar -C /usr/local -xzf go${GO_VERSION}.1.linux-amd64.tar.gz
RUN rm go${GO_VERSION}.1.linux-amd64.tar.gz
ENV PATH="/usr/local/go/bin:${PATH}"

View File

@@ -1,4 +1,6 @@
FROM golang:1.18-alpine
ARG GO_VERSION=1.19
FROM golang:${GO_VERSION}-alpine
# ENV GOPROXY https://goproxy.cn,direct

View File

@@ -1,6 +1,8 @@
FROM golang:1.18-alpine
ARG GO_VERSION=1.19
ARG RUST_VERSION=nightly-2022-12-10
ARG CARGO_CHEF_TAG=0.1.41
ARG DEFAULT_RUST_TOOLCHAIN=nightly-2022-12-10
FROM golang:${GO_VERSION}-alpine
RUN apk add --no-cache gcc musl-dev linux-headers git ca-certificates openssl-dev
@@ -24,12 +26,14 @@ RUN set -eux; \
wget "$url"; \
chmod +x rustup-init;
RUN ./rustup-init -y --no-modify-path --default-toolchain ${DEFAULT_RUST_TOOLCHAIN}; \
ARG RUST_VERSION
RUN ./rustup-init -y --no-modify-path --default-toolchain ${RUST_VERSION}; \
rm rustup-init; \
chmod -R a+w $RUSTUP_HOME $CARGO_HOME; \
rustup --version; \
cargo --version; \
rustc --version;
ARG CARGO_CHEF_TAG
RUN cargo install cargo-chef --locked --version ${CARGO_CHEF_TAG} \
&& rm -rf $CARGO_HOME/registry/

View File

@@ -1,10 +1,13 @@
ARG GO_VERSION=1.19
ARG RUST_VERSION=nightly-2022-12-10
ARG CARGO_CHEF_TAG=0.1.41
FROM ubuntu:20.04
RUN apt-get update && ln -fs /usr/share/zoneinfo/America/New_York /etc/localtime
# Install basic packages
RUN apt-get install build-essential curl wget git pkg-config -y
# Install dev-packages
RUN apt-get install libclang-dev libssl-dev llvm -y
@@ -12,23 +15,17 @@ RUN apt-get install libclang-dev libssl-dev llvm -y
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
ENV CARGO_HOME=/root/.cargo
# Add Toolchain
RUN rustup toolchain install nightly-2022-12-10
# TODO: make this ARG
ENV CARGO_CHEF_TAG=0.1.41
ARG RUST_VERSION
RUN rustup toolchain install ${RUST_VERSION}
ARG CARGO_CHEF_TAG
RUN cargo install cargo-chef --locked --version ${CARGO_CHEF_TAG} \
&& rm -rf $CARGO_HOME/registry/
# Install Go
ARG GO_VERSION
RUN rm -rf /usr/local/go
# for 1.17
# RUN wget https://go.dev/dl/go1.17.13.linux-amd64.tar.gz
# RUN tar -C /usr/local -xzf go1.17.13.linux-amd64.tar.gz
# for 1.18
RUN wget https://go.dev/dl/go1.18.9.linux-amd64.tar.gz
RUN tar -C /usr/local -xzf go1.18.9.linux-amd64.tar.gz
RUN wget https://go.dev/dl/go${GO_VERSION}.1.linux-amd64.tar.gz
RUN tar -C /usr/local -xzf go${GO_VERSION}.1.linux-amd64.tar.gz
RUN rm go${GO_VERSION}.1.linux-amd64.tar.gz
ENV PATH="/usr/local/go/bin:${PATH}"

View File

@@ -1,3 +1,4 @@
FROM python:3.10-alpine3.15
ARG PYTHON_VERSION=3.10
FROM python:${PYTHON_VERSION}-alpine
RUN apk add --no-cache gcc g++ make musl-dev

View File

@@ -1,7 +1,8 @@
ARG ALPINE_VERSION=3.15
FROM alpine:${ALPINE_VERSION}
ARG RUST_VERSION=nightly-2022-12-10
ARG CARGO_CHEF_TAG=0.1.41
ARG DEFAULT_RUST_TOOLCHAIN=nightly-2022-12-10
FROM alpine:${ALPINE_VERSION}
RUN apk add --no-cache \
ca-certificates \
@@ -27,12 +28,14 @@ RUN set -eux; \
wget "$url"; \
chmod +x rustup-init;
RUN ./rustup-init -y --no-modify-path --default-toolchain ${DEFAULT_RUST_TOOLCHAIN}; \
ARG RUST_VERSION
RUN ./rustup-init -y --no-modify-path --default-toolchain ${RUST_VERSION}; \
rm rustup-init; \
chmod -R a+w $RUSTUP_HOME $CARGO_HOME; \
rustup --version; \
cargo --version; \
rustc --version;
ARG CARGO_CHEF_TAG
RUN cargo install cargo-chef --locked --version ${CARGO_CHEF_TAG} \
&& rm -rf $CARGO_HOME/registry/

View File

@@ -1,3 +1,6 @@
ARG RUST_VERSION=nightly-2022-12-10
ARG CARGO_CHEF_TAG=0.1.41
FROM ubuntu:20.04
RUN apt-get update && ln -fs /usr/share/zoneinfo/America/New_York /etc/localtime
@@ -11,6 +14,10 @@ RUN apt-get install libclang-dev libssl-dev llvm -y
# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
ENV CARGO_HOME=/root/.cargo
# Add Toolchain
RUN rustup toolchain install nightly-2022-12-10
ARG RUST_VERSION
RUN rustup toolchain install ${RUST_VERSION}
ARG CARGO_CHEF_TAG
RUN cargo install cargo-chef --locked --version ${CARGO_CHEF_TAG} \
&& rm -rf $CARGO_HOME/registry/

View File

@@ -15,7 +15,7 @@ import (
const (
// GolangCIVersion to be used for linting.
GolangCIVersion = "github.com/golangci/golangci-lint/cmd/golangci-lint@v1.50.0"
GolangCIVersion = "github.com/golangci/golangci-lint/cmd/golangci-lint@v1.52.2"
)
// GOBIN environment variable.

View File

@@ -1,14 +0,0 @@
#!/bin/bash
set -uex
${GOROOT}/bin/bin/gocover-cobertura < coverage.bridge.txt > coverage.bridge.xml
${GOROOT}/bin/bin/gocover-cobertura < coverage.db.txt > coverage.db.xml
${GOROOT}/bin/bin/gocover-cobertura < coverage.common.txt > coverage.common.xml
${GOROOT}/bin/bin/gocover-cobertura < coverage.coordinator.txt > coverage.coordinator.xml
#${GOROOT}/bin/bin/gocover-cobertura < coverage.integration.txt > coverage.integration.xml
npx cobertura-merge -o cobertura.xml \
package1=coverage.bridge.xml \
package2=coverage.db.xml \
package3=coverage.common.xml \
package4=coverage.coordinator.xml
# package5=coverage.integration.xml

View File

@@ -1,85 +0,0 @@
imagePrefix = 'scrolltech'
credentialDocker = 'dockerhub'
TAGNAME = ''
pipeline {
agent any
options {
timeout (20)
}
tools {
go 'go-1.18'
nodejs "nodejs"
}
environment {
GO111MODULE = 'on'
PATH="/home/ubuntu/.cargo/bin:$PATH"
// LOG_DOCKER = 'true'
}
stages {
stage('Tag') {
steps {
script {
TAGNAME = sh(returnStdout: true, script: 'git tag -l --points-at HEAD')
sh "echo ${TAGNAME}"
// ...
}
}
}
stage('Build') {
environment {
// Extract the username and password of our credentials into "DOCKER_CREDENTIALS_USR" and "DOCKER_CREDENTIALS_PSW".
// (NOTE 1: DOCKER_CREDENTIALS will be set to "your_username:your_password".)
// The new variables will always be YOUR_VARIABLE_NAME + _USR and _PSW.
// (NOTE 2: You can't print credentials in the pipeline for security reasons.)
DOCKER_CREDENTIALS = credentials('dockerhub')
}
steps {
withCredentials([usernamePassword(credentialsId: "${credentialDocker}", passwordVariable: 'dockerPassword', usernameVariable: 'dockerUser')]) {
// Use a scripted pipeline.
script {
stage('Push image') {
if (TAGNAME == ""){
return;
}
sh "docker login --username=$dockerUser --password=$dockerPassword"
catchError(buildResult: 'SUCCESS', stageResult: 'SUCCESS') {
script {
try {
sh "docker manifest inspect scrolltech/bridge:$TAGNAME > /dev/null"
} catch (e) {
// only build if the tag non existed
//sh "docker login --username=${dockerUser} --password=${dockerPassword}"
sh "make -C bridge docker"
sh "docker tag scrolltech/bridge:latest scrolltech/bridge:${TAGNAME}"
sh "docker push scrolltech/bridge:${TAGNAME}"
throw e
}
}
}
catchError(buildResult: 'SUCCESS', stageResult: 'SUCCESS') {
script {
try {
sh "docker manifest inspect scrolltech/coordinator:$TAGNAME > /dev/null"
} catch (e) {
// only build if the tag non existed
//sh "docker login --username=${dockerUser} --password=${dockerPassword}"
sh "make -C coordinator docker"
sh "docker tag scrolltech/coordinator:latest scrolltech/coordinator:${TAGNAME}"
sh "docker push scrolltech/coordinator:${TAGNAME}"
throw e
}
}
}
}
}
}
}
}
}
post {
always {
cleanWs()
slackSend(message: "${JOB_BASE_NAME} ${GIT_COMMIT} #${TAGNAME} Tag build ${currentBuild.result}")
}
}
}

codecov.yml (Normal file, 38 lines)
View File

@@ -0,0 +1,38 @@
coverage:
status:
project: off
patch: off
flag_management:
default_rules:
carryforward: true
individual_flags:
- name: bridge
statuses:
- type: project
target: auto
threshold: 1%
- name: bridge-history-api
statuses:
- type: project
target: auto
threshold: 1%
- name: common
statuses:
- type: project
target: auto
threshold: 1%
- name: coordinator
statuses:
- type: project
target: auto
threshold: 1%
- name: database
statuses:
- type: project
target: auto
threshold: 1%
- name: roller
statuses:
- type: project
target: auto
threshold: 1%

View File

@@ -1,6 +1,6 @@
module scroll-tech/common
go 1.18
go 1.19
require (
github.com/docker/docker v20.10.21+incompatible

View File

@@ -29,7 +29,7 @@
{
"type": 2,
"nonce": 2,
"txHash": "0xaaaeb971adfac989c7db5426737bc2932756091a5730ea6d5324f93e4cff9713",
"txHash": "0x6b50040f5f14bad253f202b0775d6742131bcaee6b992f05578386f00e53b7e4",
"gas": 1152994,
"gasPrice": "0x3b9b0a17",
"from": "0x1c5a77d9fa7ef466951b2f01f724bca3a5820b63",

common/testdata/blockTrace_04.json (vendored Normal file, 868 lines)
View File

@@ -0,0 +1,868 @@
{
"coinbase": {
"address": "0x5300000000000000000000000000000000000005",
"nonce": 0,
"balance": "0x2aa86921dcd2c0",
"keccakCodeHash": "0x256e306f068f0847c8aab5819879b2ff45c021ce2e2f428be51be663415b1d60",
"poseidonCodeHash": "0x2c49d7de76e39008575f2f090bb3e90912bad475ea8102c8565c249a75575df5",
"codeSize": 1652
},
"header": {
"parentHash": "0xe761181afb179bc4e6848ecc4e32af82c0eeff4aca77024f985d1dffb0ba0013",
"sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
"miner": "0x0000000000000000000000000000000000000000",
"stateRoot": "0x155c42b3ffa9b88987b02bc8f89fb31f2b555bb8bff971d6fcd92e04a144c248",
"transactionsRoot": "0x891f5907147c83867e1e7b200b9d26fb43c3c08f81202d04235c84a2aa79f72f",
"receiptsRoot": "0x7ad169feb178baf74f7c0a12a28570bd69bd10e616acad2caea09a55fd1fb541",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"difficulty": "0x2",
"number": "0xd",
"gasLimit": "0x7a1200",
"gasUsed": "0x5dc0",
"timestamp": "0x646b6e13",
"extraData": "0xd983030201846765746889676f312e31382e3130856c696e7578000000000000f942387d5a3dba7786280b806f022e2afaec53939149ac7b132b4ef1cf5cdf393d688543d984ae15b1896185ea13f9e7ae18b22b65e5ffec9128195d7cde6fa700",
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"nonce": "0x0000000000000000",
"baseFeePerGas": null,
"hash": "0x09f75bc27efe18cd77a82491370442ea5a6066e910b73dc99fe1caff950c357b"
},
"transactions": [
{
"type": 126,
"nonce": 10,
"txHash": "0xed6dff31c5516b3b9d169781865276cf27501aadd45c131bf8c841c5e619e56a",
"gas": 24000,
"gasPrice": "0x0",
"from": "0x478cdd110520a8e733e2acf9e543d2c687ea5239",
"to": "0x1a258d17bf244c4df02d40343a7626a9d321e105",
"chainId": "0x0",
"value": "0x0",
"data": "0x8ef1332e000000000000000000000000ea08a65b1829af779261e768d609e59279b510f2000000000000000000000000f2ec6b6206f6208e8f9b394efc1a01c1cbde77750000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000a4232e87480000000000000000000000002b5ad5c4795c026514f8317c7a215e218dccd6cf0000000000000000000000002b5ad5c4795c026514f8317c7a215e218dccd6cf00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"isCreate": false,
"v": "0x0",
"r": "0x0",
"s": "0x0"
},
{
"type": 0,
"nonce": 11,
"txHash": "0xed6dff31c5516b3b9d169781865276cf27501aadd45c131bf8c841c5e619e56a",
"gas": 24000,
"gasPrice": "0x0",
"from": "0x478cdd110520a8e733e2acf9e543d2c687ea5239",
"to": "0x1a258d17bf244c4df02d40343a7626a9d321e105",
"chainId": "0x0",
"value": "0x0",
"data": "0x",
"isCreate": false,
"v": "0x0",
"r": "0x0",
"s": "0x0"
}
],
"storageTrace": {
"rootBefore": "0x16d403e1c55dee3e020457262414ee7a20596922c08cac631385d8ea6d6c2c2b",
"rootAfter": "0x155c42b3ffa9b88987b02bc8f89fb31f2b555bb8bff971d6fcd92e04a144c248",
"proofs": {
"0x1a258d17bF244C4dF02d40343a7626A9D321e105": [
"0x000f2d6436a450dc3daf4f111527f3e187a9641e7c5cbc4f53a386e6e4114bb8202cc33de5af63f5deca2409302103a4523463a3a16529835d526795e8966079db",
"0x0029ce00b3e5ddca3bd22d3a923b95239ed11243363803b8e1f5a89fb37ee3c6e52c0d8469864d5ee8e0d62944e8dc1de68f78b094d3ef7cf72a21b372866bab0a",
"0x001dcee8089ea21f679f1af199cc93ccb35fdea1257b9ffeac0ae5c89654a0dbce20790d9030fd3f822620f7395f1af3ca53789e7451f811c2364f2b4fa19be9fd",
"0x000d62fbf3a623b87d67d8f97132a8f1759360c03c1b78ea3654238eb6c72fd5dd0742c02437cc0294c49133a28968ba1f913963d9c2892254da675958cd4a4b2e",
"0x0026875849a967c3af8bbd7ac6efb4ef8250efaee44c8bd85ac026d541c7f509ac18ae138a98367696a39f7abe0a53fd3b32283fa843bdc4a2485d65b3b9651670",
"0x0125375fd5ae821cd3e835e2fba4ae79971635b7288d549ba8ba66bea36603686c05080000000000000000000000000000000000000000000000000867000000000000000130644e72e131a029b85045b68181585d2833e84879b9705b0e1847ce1160000030221b0e9cf191ce544dcc5c8927fd08af82cb88be110d9533468ffd2d575aed31f2125c021fb94759cb1993a2f07eae01792311e13f209441ff8969cf1eb8351cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c2201a258d17bf244c4df02d40343a7626a9d321e105000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x478CDd110520a8e733e2ACF9e543d2c687EA5239": [
"0x000f2d6436a450dc3daf4f111527f3e187a9641e7c5cbc4f53a386e6e4114bb8202cc33de5af63f5deca2409302103a4523463a3a16529835d526795e8966079db",
"0x0029ce00b3e5ddca3bd22d3a923b95239ed11243363803b8e1f5a89fb37ee3c6e52c0d8469864d5ee8e0d62944e8dc1de68f78b094d3ef7cf72a21b372866bab0a",
"0x000bf7d923da6cc335d4074262981bf4615b43a8eb2a4dd6f2eda4fd8e1503d9311c4e63762bb10044749243a2b52db21797da53a89ba6b8ceb5cee1596150ac45",
"0x002b29daef215b12b331bf75a98e595b8a10a91928f479cca3562db3859315055a1cb697055013d78d58072071584b3e40e8d846948c8e829cbbe9915e4bcf08f0",
"0x00000000000000000000000000000000000000000000000000000000000000000007b1a84d4b19493ba2ca6a59dbc42d0e8559a7f8fb0c066bb8b1d90ceee9ce5c",
"0x0000000000000000000000000000000000000000000000000000000000000000000e9e173703b7c89f67443e861d959df35575c16617ea238fd235d8612f9020ba",
"0x0000000000000000000000000000000000000000000000000000000000000000000ea71dd32b28e075772420197e740ad0ed7990e3f6e5be7f5051f0c0709defce",
"0x000000000000000000000000000000000000000000000000000000000000000000186f00dca57567f28233cef5140efd49b1624b0ec3aef5b7f7ee42f03c3b6231",
"0x0006aac99418e9b09baea374df117e64523910d04427251eec6a9b482b6433bc54186c0fb6b2462a9c851df47ab11054dac43ed5b3f9d8d8a5fcf2fd0f9eb3e147",
"0x0109c2edb6138e8d6dc8f0b8b5ae98dd721c7053061887757f6749c484bddf92fa05080000000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a4702098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b6486420478cdd110520a8e733e2acf9e543d2c687ea5239000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x5300000000000000000000000000000000000000": [
"0x000f2d6436a450dc3daf4f111527f3e187a9641e7c5cbc4f53a386e6e4114bb8202cc33de5af63f5deca2409302103a4523463a3a16529835d526795e8966079db",
"0x0029ce00b3e5ddca3bd22d3a923b95239ed11243363803b8e1f5a89fb37ee3c6e52c0d8469864d5ee8e0d62944e8dc1de68f78b094d3ef7cf72a21b372866bab0a",
"0x001dcee8089ea21f679f1af199cc93ccb35fdea1257b9ffeac0ae5c89654a0dbce20790d9030fd3f822620f7395f1af3ca53789e7451f811c2364f2b4fa19be9fd",
"0x000d62fbf3a623b87d67d8f97132a8f1759360c03c1b78ea3654238eb6c72fd5dd0742c02437cc0294c49133a28968ba1f913963d9c2892254da675958cd4a4b2e",
"0x0026875849a967c3af8bbd7ac6efb4ef8250efaee44c8bd85ac026d541c7f509ac18ae138a98367696a39f7abe0a53fd3b32283fa843bdc4a2485d65b3b9651670",
"0x000a3197466e4643551413444b60bbf8ab0ced04566326492fdf1993586eec3fe10000000000000000000000000000000000000000000000000000000000000000",
"0x002143f0cbad38f9696bb9c0be84281e5b517a06983edef7c75485b7a06473c97921dd9af8de7aade9fba53909b1a98ae938236ceec8ba6346ba3ba75c039194d7",
"0x0115d04fcf1fe3d9a4cc7a76b70fafcd7b9304b42108af39d9e500be391563775c0508000000000000000000000000000000000000000000000000064d000000000000000000000000000000000000000000000000000000000000000000000000000000002908ab50d1edc9dac80a344f44731acf807809c545e3388816b97a9882b5d4f974ae902ff6a84825a9cde7cc5f26e8c414e88139716c3423ed908f0a60c996011c70d94e9dc7c85d39f6877b01e59a87c057882957d9fd16c55025dfdcaa4d93205300000000000000000000000000000000000000000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x5300000000000000000000000000000000000002": [
"0x000f2d6436a450dc3daf4f111527f3e187a9641e7c5cbc4f53a386e6e4114bb8202cc33de5af63f5deca2409302103a4523463a3a16529835d526795e8966079db",
"0x000150eaa497ee8904a3d2dc8350c03963fb1786ea5253d5cc16f321afcd862cee107e99fa497bffacfb8ab50a44b93c9a74bc7c669323c7fbd0560a657342c55a",
"0x000877a6983a09f78254ca94a086eb673296f5583aa33855bfbdbe6d2fadf0ff0107b2e01ad456a3ec4c88478c604ad6a15c6fb572259e49ef4cc781940fe1375e",
"0x0013b6a97296cf294d19f634904a7fa973d9714b90cc42e0456ad428b7278f338e0accad868d7f4aaa755b29eae6ad523415a9df210ffced28d7d33fa6d5a319b3",
"0x0011de0e672d258d43c785592fc939bc105441bafc9c1455901723358b0a73d5cc29562af63a2293f036058180ce56f5269c6a3d4d18d8e1dc75ef03cb8f51f8b9",
"0x01236b0ff4611519fb52869dd99bedcb730ebe17544687c5064da49f42f741831d05080000000000000000000000000000000000000000000000000873000000000000000000000000000000000000000000000000000000000000000000000000000000001bd955d4ef171429eb11fade67006376e84bf94630ddb9b9948c3f385ce0f05aa48c68219d344cebd30fca18d0777f587e55052ae6161c88fa4c16407211ddaa0d39d683afa3720f93c44224e2b95a5871a5a2207b5323f7fbf8f1862120ba90205300000000000000000000000000000000000002000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x5300000000000000000000000000000000000005": [
"0x000f2d6436a450dc3daf4f111527f3e187a9641e7c5cbc4f53a386e6e4114bb8202cc33de5af63f5deca2409302103a4523463a3a16529835d526795e8966079db",
"0x0029ce00b3e5ddca3bd22d3a923b95239ed11243363803b8e1f5a89fb37ee3c6e52c0d8469864d5ee8e0d62944e8dc1de68f78b094d3ef7cf72a21b372866bab0a",
"0x000bf7d923da6cc335d4074262981bf4615b43a8eb2a4dd6f2eda4fd8e1503d9311c4e63762bb10044749243a2b52db21797da53a89ba6b8ceb5cee1596150ac45",
"0x002b29daef215b12b331bf75a98e595b8a10a91928f479cca3562db3859315055a1cb697055013d78d58072071584b3e40e8d846948c8e829cbbe9915e4bcf08f0",
"0x011facf302b106912bccc8194dff4cb12139e7f04288d3f5eefb57ccf4d842ba22050800000000000000000000000000000000000000000000000006740000000000000000000000000000000000000000000000000000000000000000002aa86921dcd2c018f4988204e816e17e42d9f9a2a468d8ca70ad453a88d3e371a0d9f743b799a6256e306f068f0847c8aab5819879b2ff45c021ce2e2f428be51be663415b1d602c49d7de76e39008575f2f090bb3e90912bad475ea8102c8565c249a75575df5205300000000000000000000000000000000000005000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
]
},
"storageProofs": {
"0x1a258d17bF244C4dF02d40343a7626A9D321e105": {
"0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103": [
"0x001914b8a8cb4d4339d89ed1d5e6cd54ec609082fdf42fadb2d4101f3214f2a2290a1746dfbdf492c00e2854b46eda6adad88ad1b0583997db4121cb7d8e6de5ca",
"0x00084878451370def5a5648862c037adb6ae24f29b9237a1823638ca29d573bdd42446af3926a42a7e8b65f9a5fdd5a00e82e4f2b9684816fdc5d52c238bef604a",
"0x00027f6e365685a83e63cde58e13d22b99c130a578178f8198d755171a2ff97bf303e187b8ea9652424a9d9dac9bc16796838b196f141c6db57136643f22b48468",
"0x00149dad479c283104bb461dcce598d82aacff80a5844d863d8f64e0d3f3e83b1a0000000000000000000000000000000000000000000000000000000000000000",
"0x001f232429e01853a7456bc8bb4cbc3a35c132f7783e2b300306bceb64a44ce81e0000000000000000000000000000000000000000000000000000000000000000",
"0x0027e1c425d61d4468534c93b8aa80c34bbdea9ec2d69df7a730ecacf0089b22640000000000000000000000000000000000000000000000000000000000000000",
"0x001f4bdfdda0df475064a0ea35302dddc6401b8c93afad9a7569afb9f2534750560000000000000000000000000000000000000000000000000000000000000000",
"0x0001fc65caf9a60abae81bcb17c4854fa114100528e73ab1e649fac03ed9fa764e304459eb829e92aa3009534c4eba916b2900783c694385d2e7f87004e7649215",
"0x01249c7b39f739f430be8e1e2cae0f1db06dfe2f8d4cc631d312d5b98efb3e7402010100000000000000000000000000008eebfef33eb00149852cadb631838ad9bfcce84820b53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
]
},
"0x5300000000000000000000000000000000000000": {
"0x0000000000000000000000000000000000000000000000000000000000000000": [
"0x0004f706d28ba7344cc73128f383e7f4df4c79f296a56e1bbc24cdfab5bc4cba5c2a970eaf68f6e47243e30bea39087adc0082afa5fd55fc5537baccd03f786953",
"0x00296af6438bc81ff661ef6d1bb16d33d6784e88ae39ff28258e56e4e72d5607052bb61b23d947a704c29df01936e7c557bf9ec541243566a336b43f8aeca37eed",
"0x001750ff1780c9b253cfcbd6274a4f79f3a95819e0856c31f0a6025e30ac3a5b261b73cc5623d88d2687f0fa6006bc823149c779b9e751477a6f2b83773062ddbe",
"0x0004c8c2bf27ee6712f4175555679ff662b9423a1d7205fe31e77999106cfb5a2f0efef64a4ef3d151d1364174e0e72745aeee51bf93fb17f8071e6daf4571a736",
"0x001de6dfed408db1b0cf580652da17c9277834302d9ee2c39ab074675ca61fd9e02ea58d0958b74734329987e16d8afa4d83a7acc46417a7f7dbc1fd42e305b394",
"0x001dd3e7dce636d92fdb4dd8b65cb4e5b8ffd3d64e54a51d93a527826bb1ec3a480000000000000000000000000000000000000000000000000000000000000000",
"0x02",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
]
},
"0x5300000000000000000000000000000000000002": {
"0x0000000000000000000000000000000000000000000000000000000000000001": [
"0x00024a2d3ee220db30dece4b39c0cffc2ba97ddded52a3f2da3aeed1f485d0a7220000000000000000000000000000000000000000000000000000000000000000",
"0x001da3cd3096ffd62c95bad392eedc1c578e7ccf248898c49c5ed82abb49a4b31a2b63c0d58a64939cf9026618503b904e267eeb0e465e15812b85485e81fb856c",
"0x01232927899d46fea05cc897a4f4671f808aa83c4eaf89396dfab15480fee91e8e010100000000000000000000000000005300000000000000000000000000000000000003200000000000000000000000000000000000000000000000000000000000000004",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x0000000000000000000000000000000000000000000000000000000000000002": [
"0x00024a2d3ee220db30dece4b39c0cffc2ba97ddded52a3f2da3aeed1f485d0a7220000000000000000000000000000000000000000000000000000000000000000",
"0x001da3cd3096ffd62c95bad392eedc1c578e7ccf248898c49c5ed82abb49a4b31a2b63c0d58a64939cf9026618503b904e267eeb0e465e15812b85485e81fb856c",
"0x012098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864010100000000000000000000000000006f4c950442e1af093bcff730381e63ae9171b87a200000000000000000000000000000000000000000000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x0000000000000000000000000000000000000000000000000000000000000003": [
"0x00024a2d3ee220db30dece4b39c0cffc2ba97ddded52a3f2da3aeed1f485d0a7220000000000000000000000000000000000000000000000000000000000000000",
"0x001da3cd3096ffd62c95bad392eedc1c578e7ccf248898c49c5ed82abb49a4b31a2b63c0d58a64939cf9026618503b904e267eeb0e465e15812b85485e81fb856c",
"0x012098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864010100000000000000000000000000006f4c950442e1af093bcff730381e63ae9171b87a200000000000000000000000000000000000000000000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
]
}
}
},
"executionResults": [
{
"gas": 24000,
"failed": true,
"returnValue": "",
"from": {
"address": "0x478cdd110520a8e733e2acf9e543d2c687ea5239",
"nonce": 10,
"balance": "0x0",
"keccakCodeHash": "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
"poseidonCodeHash": "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
"codeSize": 0
},
"to": {
"address": "0x1a258d17bf244c4df02d40343a7626a9d321e105",
"nonce": 1,
"balance": "0x30644e72e131a029b85045b68181585d2833e84879b9705b0e1847ce11600000",
"keccakCodeHash": "0x31f2125c021fb94759cb1993a2f07eae01792311e13f209441ff8969cf1eb835",
"poseidonCodeHash": "0x1cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c2",
"codeSize": 2151
},
"accountAfter": [
{
"address": "0x478cdd110520a8e733e2acf9e543d2c687ea5239",
"nonce": 11,
"balance": "0x0",
"keccakCodeHash": "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
"poseidonCodeHash": "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
"codeSize": 0
},
{
"address": "0x1a258d17bf244c4df02d40343a7626a9d321e105",
"nonce": 1,
"balance": "0x30644e72e131a029b85045b68181585d2833e84879b9705b0e1847ce11600000",
"keccakCodeHash": "0x31f2125c021fb94759cb1993a2f07eae01792311e13f209441ff8969cf1eb835",
"poseidonCodeHash": "0x1cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c2",
"codeSize": 2151
},
{
"address": "0x5300000000000000000000000000000000000005",
"nonce": 0,
"balance": "0x2aa86921dcd2c0",
"keccakCodeHash": "0x256e306f068f0847c8aab5819879b2ff45c021ce2e2f428be51be663415b1d60",
"poseidonCodeHash": "0x2c49d7de76e39008575f2f090bb3e90912bad475ea8102c8565c249a75575df5",
"codeSize": 1652
}
],
"poseidonCodeHash": "0x1cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c2",
"byteCode": "0x60806040526004361061004e5760003560e01c80633659cfe6146100655780634f1ef286146100855780635c60da1b146100985780638f283970146100c9578063f851a440146100e95761005d565b3661005d5761005b6100fe565b005b61005b6100fe565b34801561007157600080fd5b5061005b6100803660046106f1565b610118565b61005b61009336600461070c565b61015f565b3480156100a457600080fd5b506100ad6101d0565b6040516001600160a01b03909116815260200160405180910390f35b3480156100d557600080fd5b5061005b6100e43660046106f1565b61020b565b3480156100f557600080fd5b506100ad610235565b61010661029b565b61011661011161033a565b610344565b565b610120610368565b6001600160a01b0316336001600160a01b03161415610157576101548160405180602001604052806000815250600061039b565b50565b6101546100fe565b610167610368565b6001600160a01b0316336001600160a01b031614156101c8576101c38383838080601f0160208091040260200160405190810160405280939291908181526020018383808284376000920191909152506001925061039b915050565b505050565b6101c36100fe565b60006101da610368565b6001600160a01b0316336001600160a01b03161415610200576101fb61033a565b905090565b6102086100fe565b90565b610213610368565b6001600160a01b0316336001600160a01b0316141561015757610154816103c6565b600061023f610368565b6001600160a01b0316336001600160a01b03161415610200576101fb610368565b6060610285838360405180606001604052806027815260200161080b6027913961041a565b9392505050565b6001600160a01b03163b151590565b6102a3610368565b6001600160a01b0316336001600160a01b031614156101165760405162461bcd60e51b815260206004820152604260248201527f5472616e73706172656e745570677261646561626c6550726f78793a2061646d60448201527f696e2063616e6e6f742066616c6c6261636b20746f2070726f78792074617267606482015261195d60f21b608482015260a4015b60405180910390fd5b60006101fb6104f7565b3660008037600080366000845af43d6000803e808015610363573d6000f35b3d6000fd5b60007fb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d61035b546001600160a01b0316919050565b6103a48361051f565b6000825111806103b15750805b156101c3576103c08383610260565b50505050565b7f7e644d79422f17c01e4894b5f4f588d331ebfa28653d42ae832dc59e38c9798f6103ef610368565b604080516001600160a01b03928316815291841660208301520160405180910390a16101548161055f565b60606001600160a01b0384163b6104825760405162461bcd60e51b815260206004820152602660248201527f416464726573733a2064656c65676174652063616c6c20746f206e6f6e2d636f6044820152651b9d1c9858dd60d21b6064820152608401610331565b600080856001600160a01b03168560405161049d91906107bb565b600060405180830381855af49150503d80600081146104d8576040519150601f19603f3d011682016040523d82523d6000602084013e6104dd565b606091505b50915091506104ed828286610608565b9695505050505050565b60007f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc61038c565b61052881610641565b6040516001600160a01b038216907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b90600090a250565b6001600160a01b0381166105c45760405162461bcd60e51b815260206004820152602660248201527f455243313936373a206e65772061646d696e20697320746865207a65726f206160448201526564647265737360d01b6064820152608401610331565b807fb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d61035b80546001600160a01b0319166001600160a01b039290921691909117905550565b60608315610617575081610285565b8251156106275782518084602001fd5b8160405162461bcd60e51b815260040161033191906107d7565b6001600160a01b0381163b6106ae5760405162461bcd60e51b815260206004820152602d60248201527f455243313936373a206e657720696d706c656d656e746174696f6e206973206e60448201526c1bdd08184818dbdb9d1c9858dd609a1b6064820152608401610331565b807f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc6105e7565b80356001600160a01b03811681146106ec576000
80fd5b919050565b60006020828403121561070357600080fd5b610285826106d5565b60008060006040848603121561072157600080fd5b61072a846106d5565b9250602084013567ffffffffffffffff8082111561074757600080fd5b818601915086601f83011261075b57600080fd5b81358181111561076a57600080fd5b87602082850101111561077c57600080fd5b6020830194508093505050509250925092565b60005b838110156107aa578181015183820152602001610792565b838111156103c05750506000910152565b600082516107cd81846020870161078f565b9190910192915050565b60208152600082518060208401526107f681604085016020870161078f565b601f01601f1916919091016040019291505056fe416464726573733a206c6f772d6c6576656c2064656c65676174652063616c6c206661696c6564a2646970667358221220366737524a7ac8fa76e3b2cd04bb1e0b8aa75e165c32f59b0076ead59d529de564736f6c634300080a0033",
"structLogs": [
{
"pc": 0,
"op": "PUSH1",
"gas": 320,
"gasCost": 3,
"depth": 1
},
{
"pc": 2,
"op": "PUSH1",
"gas": 317,
"gasCost": 3,
"depth": 1,
"stack": [
"0x80"
]
},
{
"pc": 4,
"op": "MSTORE",
"gas": 314,
"gasCost": 12,
"depth": 1,
"stack": [
"0x80",
"0x40"
]
},
{
"pc": 5,
"op": "PUSH1",
"gas": 302,
"gasCost": 3,
"depth": 1
},
{
"pc": 7,
"op": "CALLDATASIZE",
"gas": 299,
"gasCost": 2,
"depth": 1,
"stack": [
"0x4"
]
},
{
"pc": 8,
"op": "LT",
"gas": 297,
"gasCost": 3,
"depth": 1,
"stack": [
"0x4",
"0x184"
]
},
{
"pc": 9,
"op": "PUSH2",
"gas": 294,
"gasCost": 3,
"depth": 1,
"stack": [
"0x0"
]
},
{
"pc": 12,
"op": "JUMPI",
"gas": 291,
"gasCost": 10,
"depth": 1,
"stack": [
"0x0",
"0x4e"
]
},
{
"pc": 13,
"op": "PUSH1",
"gas": 281,
"gasCost": 3,
"depth": 1
},
{
"pc": 15,
"op": "CALLDATALOAD",
"gas": 278,
"gasCost": 3,
"depth": 1,
"stack": [
"0x0"
]
},
{
"pc": 16,
"op": "PUSH1",
"gas": 275,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e000000000000000000000000ea08a65b1829af779261e768d609e592"
]
},
{
"pc": 18,
"op": "SHR",
"gas": 272,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e000000000000000000000000ea08a65b1829af779261e768d609e592",
"0xe0"
]
},
{
"pc": 19,
"op": "DUP1",
"gas": 269,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 20,
"op": "PUSH4",
"gas": 266,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e"
]
},
{
"pc": 25,
"op": "EQ",
"gas": 263,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e",
"0x3659cfe6"
]
},
{
"pc": 26,
"op": "PUSH2",
"gas": 260,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0"
]
},
{
"pc": 29,
"op": "JUMPI",
"gas": 257,
"gasCost": 10,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0",
"0x65"
]
},
{
"pc": 30,
"op": "DUP1",
"gas": 247,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 31,
"op": "PUSH4",
"gas": 244,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e"
]
},
{
"pc": 36,
"op": "EQ",
"gas": 241,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e",
"0x4f1ef286"
]
},
{
"pc": 37,
"op": "PUSH2",
"gas": 238,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0"
]
},
{
"pc": 40,
"op": "JUMPI",
"gas": 235,
"gasCost": 10,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0",
"0x85"
]
},
{
"pc": 41,
"op": "DUP1",
"gas": 225,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 42,
"op": "PUSH4",
"gas": 222,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e"
]
},
{
"pc": 47,
"op": "EQ",
"gas": 219,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e",
"0x5c60da1b"
]
},
{
"pc": 48,
"op": "PUSH2",
"gas": 216,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0"
]
},
{
"pc": 51,
"op": "JUMPI",
"gas": 213,
"gasCost": 10,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0",
"0x98"
]
},
{
"pc": 52,
"op": "DUP1",
"gas": 203,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 53,
"op": "PUSH4",
"gas": 200,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e"
]
},
{
"pc": 58,
"op": "EQ",
"gas": 197,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e",
"0x8f283970"
]
},
{
"pc": 59,
"op": "PUSH2",
"gas": 194,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0"
]
},
{
"pc": 62,
"op": "JUMPI",
"gas": 191,
"gasCost": 10,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0",
"0xc9"
]
},
{
"pc": 63,
"op": "DUP1",
"gas": 181,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 64,
"op": "PUSH4",
"gas": 178,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e"
]
},
{
"pc": 69,
"op": "EQ",
"gas": 175,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e",
"0xf851a440"
]
},
{
"pc": 70,
"op": "PUSH2",
"gas": 172,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0"
]
},
{
"pc": 73,
"op": "JUMPI",
"gas": 169,
"gasCost": 10,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0",
"0xe9"
]
},
{
"pc": 74,
"op": "PUSH2",
"gas": 159,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 77,
"op": "JUMP",
"gas": 156,
"gasCost": 8,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5d"
]
},
{
"pc": 93,
"op": "JUMPDEST",
"gas": 148,
"gasCost": 1,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 94,
"op": "PUSH2",
"gas": 147,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 97,
"op": "PUSH2",
"gas": 144,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b"
]
},
{
"pc": 100,
"op": "JUMP",
"gas": 141,
"gasCost": 8,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0xfe"
]
},
{
"pc": 254,
"op": "JUMPDEST",
"gas": 133,
"gasCost": 1,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b"
]
},
{
"pc": 255,
"op": "PUSH2",
"gas": 132,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b"
]
},
{
"pc": 258,
"op": "PUSH2",
"gas": 129,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106"
]
},
{
"pc": 261,
"op": "JUMP",
"gas": 126,
"gasCost": 8,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x29b"
]
},
{
"pc": 667,
"op": "JUMPDEST",
"gas": 118,
"gasCost": 1,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106"
]
},
{
"pc": 668,
"op": "PUSH2",
"gas": 117,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106"
]
},
{
"pc": 671,
"op": "PUSH2",
"gas": 114,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3"
]
},
{
"pc": 674,
"op": "JUMP",
"gas": 111,
"gasCost": 8,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3",
"0x368"
]
},
{
"pc": 872,
"op": "JUMPDEST",
"gas": 103,
"gasCost": 1,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3"
]
},
{
"pc": 873,
"op": "PUSH1",
"gas": 102,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3"
]
},
{
"pc": 875,
"op": "PUSH32",
"gas": 99,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3",
"0x0"
]
},
{
"pc": 908,
"op": "JUMPDEST",
"gas": 96,
"gasCost": 1,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3",
"0x0",
"0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103"
]
},
{
"pc": 909,
"op": "SLOAD",
"gas": 95,
"gasCost": 2100,
"depth": 1,
"error": "out of gas",
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3",
"0x0",
"0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103"
],
"storage": {
"0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103": "0x0000000000000000000000008eebfef33eb00149852cadb631838ad9bfcce848"
},
"extraData": {
"proofList": [
{
"address": "0x1a258d17bf244c4df02d40343a7626a9d321e105",
"nonce": 1,
"balance": "0x30644e72e131a029b85045b68181585d2833e84879b9705b0e1847ce11600000",
"keccakCodeHash": "0x31f2125c021fb94759cb1993a2f07eae01792311e13f209441ff8969cf1eb835",
"poseidonCodeHash": "0x1cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c2",
"codeSize": 2151,
"storage": {
"key": "0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103",
"value": "0x0000000000000000000000008eebfef33eb00149852cadb631838ad9bfcce848"
}
}
]
}
}
]
}
],
"withdraw_trie_root": "0x0000000000000000000000000000000000000000000000000000000000000000"
}

common/testdata/blockTrace_05.json (vendored, new file, 3710 lines; diff suppressed because it is too large)

common/testdata/blockTrace_06.json (vendored, new file, 3678 lines; diff suppressed because it is too large)

common/testdata/blockTrace_07.json (vendored, new file, 3662 lines; diff suppressed because it is too large)


@@ -0,0 +1,120 @@
package types
import (
"encoding/binary"
"fmt"
"math/big"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/crypto"
)
// BatchHeader contains batch header info to be committed.
type BatchHeader struct {
// Encoded in BatchHeaderV0Codec
version uint8
batchIndex uint64
l1MessagePopped uint64
totalL1MessagePopped uint64
dataHash common.Hash
parentBatchHash common.Hash
skippedL1MessageBitmap []byte
}
// NewBatchHeader creates a new BatchHeader
func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64, parentBatchHash common.Hash, chunks []*Chunk) (*BatchHeader, error) {
// buffer for storing chunk hashes in order to compute the batch data hash
var dataBytes []byte
// skipped L1 message bitmap, an array of 256-bit bitmaps
var skippedBitmap []*big.Int
// the first queue index that belongs to this batch
baseIndex := totalL1MessagePoppedBefore
// the next queue index that we need to process
nextIndex := totalL1MessagePoppedBefore
for _, chunk := range chunks {
// build data hash
totalL1MessagePoppedBeforeChunk := nextIndex
chunkBytes, err := chunk.Hash(totalL1MessagePoppedBeforeChunk)
if err != nil {
return nil, err
}
dataBytes = append(dataBytes, chunkBytes...)
// build skip bitmap
for _, block := range chunk.Blocks {
for _, tx := range block.Transactions {
if tx.Type != 0x7E {
continue
}
currentIndex := tx.Nonce
if currentIndex < nextIndex {
return nil, fmt.Errorf("unexpected batch payload, expected queue index: %d, got: %d", nextIndex, currentIndex)
}
// mark skipped messages
for skippedIndex := nextIndex; skippedIndex < currentIndex; skippedIndex++ {
quo := int((skippedIndex - baseIndex) / 256)
rem := int((skippedIndex - baseIndex) % 256)
for len(skippedBitmap) <= quo {
bitmap := big.NewInt(0)
skippedBitmap = append(skippedBitmap, bitmap)
}
skippedBitmap[quo].SetBit(skippedBitmap[quo], rem, 1)
}
// process included message
quo := int((currentIndex - baseIndex) / 256)
for len(skippedBitmap) <= quo {
bitmap := big.NewInt(0)
skippedBitmap = append(skippedBitmap, bitmap)
}
nextIndex = currentIndex + 1
}
}
}
// compute data hash
dataHash := crypto.Keccak256Hash(dataBytes)
// compute skipped bitmap
bitmapBytes := make([]byte, len(skippedBitmap)*32)
for ii, num := range skippedBitmap {
bytes := num.Bytes()
padding := 32 - len(bytes)
copy(bitmapBytes[32*ii+padding:], bytes)
}
return &BatchHeader{
version: version,
batchIndex: batchIndex,
l1MessagePopped: nextIndex - totalL1MessagePoppedBefore,
totalL1MessagePopped: nextIndex,
dataHash: dataHash,
parentBatchHash: parentBatchHash,
skippedL1MessageBitmap: bitmapBytes,
}, nil
}
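// Worked example (illustrative, not part of this change): with baseIndex = 0
// and a single included L1 message at queue index 10, the loop above marks
// indices 0..9 as skipped; they all fall into bitmap word 0 (quo = 0,
// rem = 0..9), so the encoded 256-bit word is 0x3ff, the value asserted in
// the tests below.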
// Encode encodes the BatchHeader into RollupV2 BatchHeaderV0Codec Encoding.
func (b *BatchHeader) Encode() []byte {
batchBytes := make([]byte, 89+len(b.skippedL1MessageBitmap))
batchBytes[0] = b.version
binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex)
binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped)
binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped)
copy(batchBytes[25:], b.dataHash[:])
copy(batchBytes[57:], b.parentBatchHash[:])
copy(batchBytes[89:], b.skippedL1MessageBitmap[:])
return batchBytes
}
// Hash calculates the hash of the batch header.
func (b *BatchHeader) Hash() common.Hash {
return crypto.Keccak256Hash(b.Encode())
}
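The fixed-length prefix above is 89 bytes: version (1) + batchIndex (8) + l1MessagePopped (8) + totalL1MessagePopped (8) + dataHash (32) + parentBatchHash (32), with the skipped-message bitmap appended. As a reading aid, here is a minimal sketch of the reverse direction; `decodeBatchHeaderV0` is a hypothetical helper and not part of this change:

```go
package types

import (
	"encoding/binary"
	"fmt"
)

// decodeBatchHeaderV0 reverses BatchHeader.Encode, assuming the
// BatchHeaderV0Codec layout above: 89 fixed bytes followed by the bitmap.
func decodeBatchHeaderV0(data []byte) (*BatchHeader, error) {
	if len(data) < 89 {
		return nil, fmt.Errorf("batch header too short: %d bytes", len(data))
	}
	b := &BatchHeader{
		version:                data[0],
		batchIndex:             binary.BigEndian.Uint64(data[1:9]),
		l1MessagePopped:        binary.BigEndian.Uint64(data[9:17]),
		totalL1MessagePopped:   binary.BigEndian.Uint64(data[17:25]),
		skippedL1MessageBitmap: data[89:],
	}
	copy(b.dataHash[:], data[25:57])
	copy(b.parentBatchHash[:], data[57:89])
	return b, nil
}
```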


@@ -0,0 +1,234 @@
package types
import (
"encoding/json"
"os"
"testing"
"github.com/scroll-tech/go-ethereum/common"
"github.com/stretchr/testify/assert"
)
func TestNewBatchHeader(t *testing.T) {
// Without L1 Msg
templateBlockTrace, err := os.ReadFile("../testdata/blockTrace_02.json")
assert.NoError(t, err)
wrappedBlock := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock))
chunk := &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock,
},
}
parentBatchHeader := &BatchHeader{
version: 1,
batchIndex: 0,
l1MessagePopped: 0,
totalL1MessagePopped: 0,
dataHash: common.HexToHash("0x0"),
parentBatchHash: common.HexToHash("0x0"),
skippedL1MessageBitmap: nil,
}
batchHeader, err := NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
assert.NoError(t, err)
assert.NotNil(t, batchHeader)
assert.Equal(t, 0, len(batchHeader.skippedL1MessageBitmap))
// 1 L1 Msg in 1 bitmap
templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json")
assert.NoError(t, err)
wrappedBlock2 := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace2, wrappedBlock2))
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock2,
},
}
batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
assert.NoError(t, err)
assert.NotNil(t, batchHeader)
assert.Equal(t, 32, len(batchHeader.skippedL1MessageBitmap))
expectedBitmap := "00000000000000000000000000000000000000000000000000000000000003ff" // skip first 10
assert.Equal(t, expectedBitmap, common.Bytes2Hex(batchHeader.skippedL1MessageBitmap))
// many consecutive L1 Msgs in 1 bitmap, no leading skipped msgs
templateBlockTrace3, err := os.ReadFile("../testdata/blockTrace_05.json")
assert.NoError(t, err)
wrappedBlock3 := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace3, wrappedBlock3))
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock3,
},
}
batchHeader, err = NewBatchHeader(1, 1, 37, parentBatchHeader.Hash(), []*Chunk{chunk})
assert.NoError(t, err)
assert.NotNil(t, batchHeader)
assert.Equal(t, uint64(5), batchHeader.l1MessagePopped)
assert.Equal(t, 32, len(batchHeader.skippedL1MessageBitmap))
expectedBitmap = "0000000000000000000000000000000000000000000000000000000000000000" // all bits are included, so none are skipped
assert.Equal(t, expectedBitmap, common.Bytes2Hex(batchHeader.skippedL1MessageBitmap))
// many consecutive L1 Msgs in 1 bitmap, with leading skipped msgs
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock3,
},
}
batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
assert.NoError(t, err)
assert.NotNil(t, batchHeader)
assert.Equal(t, uint64(42), batchHeader.l1MessagePopped)
assert.Equal(t, 32, len(batchHeader.skippedL1MessageBitmap))
expectedBitmap = "0000000000000000000000000000000000000000000000000000001fffffffff" // skipped the first 37 messages
assert.Equal(t, expectedBitmap, common.Bytes2Hex(batchHeader.skippedL1MessageBitmap))
// many sparse L1 Msgs in 1 bitmap
templateBlockTrace4, err := os.ReadFile("../testdata/blockTrace_06.json")
assert.NoError(t, err)
wrappedBlock4 := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace4, wrappedBlock4))
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock4,
},
}
batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
assert.NoError(t, err)
assert.NotNil(t, batchHeader)
assert.Equal(t, uint64(10), batchHeader.l1MessagePopped)
assert.Equal(t, 32, len(batchHeader.skippedL1MessageBitmap))
expectedBitmap = "00000000000000000000000000000000000000000000000000000000000001dd" // 0111011101
assert.Equal(t, expectedBitmap, common.Bytes2Hex(batchHeader.skippedL1MessageBitmap))
// many L1 Msgs in each of 2 bitmaps
templateBlockTrace5, err := os.ReadFile("../testdata/blockTrace_07.json")
assert.NoError(t, err)
wrappedBlock5 := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace5, wrappedBlock5))
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock5,
},
}
batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
assert.NoError(t, err)
assert.NotNil(t, batchHeader)
assert.Equal(t, uint64(257), batchHeader.l1MessagePopped)
assert.Equal(t, 64, len(batchHeader.skippedL1MessageBitmap))
expectedBitmap = "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000"
assert.Equal(t, expectedBitmap, common.Bytes2Hex(batchHeader.skippedL1MessageBitmap))
}
func TestBatchHeaderEncode(t *testing.T) {
// Without L1 Msg
templateBlockTrace, err := os.ReadFile("../testdata/blockTrace_02.json")
assert.NoError(t, err)
wrappedBlock := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock))
chunk := &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock,
},
}
parentBatchHeader := &BatchHeader{
version: 1,
batchIndex: 0,
l1MessagePopped: 0,
totalL1MessagePopped: 0,
dataHash: common.HexToHash("0x0"),
parentBatchHash: common.HexToHash("0x0"),
skippedL1MessageBitmap: nil,
}
batchHeader, err := NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
assert.NoError(t, err)
assert.NotNil(t, batchHeader)
bytes := batchHeader.Encode()
assert.Equal(t, 89, len(bytes))
assert.Equal(t, "0100000000000000010000000000000000000000000000000010a64c9bd905f8caf5d668fbda622d6558c5a42cdb4b3895709743d159c22e534136709aabc8a23aa17fbcc833da2f7857d3c2884feec9aae73429c135f94985", common.Bytes2Hex(bytes))
// With L1 Msg
templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json")
assert.NoError(t, err)
wrappedBlock2 := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace2, wrappedBlock2))
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock2,
},
}
batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
assert.NoError(t, err)
assert.NotNil(t, batchHeader)
bytes = batchHeader.Encode()
assert.Equal(t, 121, len(bytes))
assert.Equal(t, "010000000000000001000000000000000b000000000000000b457a9e90e8e51ba2de2f66c6b589540b88cf594dac7fa7d04b99cdcfecf24e384136709aabc8a23aa17fbcc833da2f7857d3c2884feec9aae73429c135f9498500000000000000000000000000000000000000000000000000000000000003ff", common.Bytes2Hex(bytes))
}
func TestBatchHeaderHash(t *testing.T) {
// Without L1 Msg
templateBlockTrace, err := os.ReadFile("../testdata/blockTrace_02.json")
assert.NoError(t, err)
wrappedBlock := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock))
chunk := &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock,
},
}
parentBatchHeader := &BatchHeader{
version: 1,
batchIndex: 0,
l1MessagePopped: 0,
totalL1MessagePopped: 0,
dataHash: common.HexToHash("0x0"),
parentBatchHash: common.HexToHash("0x0"),
skippedL1MessageBitmap: nil,
}
batchHeader, err := NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
assert.NoError(t, err)
assert.NotNil(t, batchHeader)
hash := batchHeader.Hash()
assert.Equal(t, "d69da4357da0073f4093c76e49f077e21bb52f48f57ee3e1fbd9c38a2881af81", common.Bytes2Hex(hash.Bytes()))
templateBlockTrace, err = os.ReadFile("../testdata/blockTrace_03.json")
assert.NoError(t, err)
wrappedBlock2 := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock2))
chunk2 := &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock2,
},
}
batchHeader2, err := NewBatchHeader(1, 2, 0, batchHeader.Hash(), []*Chunk{chunk2})
assert.NoError(t, err)
assert.NotNil(t, batchHeader2)
hash2 := batchHeader2.Hash()
assert.Equal(t, "34de600163aa745d4513113137a5b54960d13f0d3f2849e490c4b875028bf930", common.Bytes2Hex(hash2.Bytes()))
// With L1 Msg
templateBlockTrace3, err := os.ReadFile("../testdata/blockTrace_04.json")
assert.NoError(t, err)
wrappedBlock3 := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace3, wrappedBlock3))
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock3,
},
}
batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
assert.NoError(t, err)
assert.NotNil(t, batchHeader)
hash = batchHeader.Hash()
assert.Equal(t, "0ec9547c6645d5f0c1254e121f49e93f54525cfda5bfb2236440fb3470f48902", common.Bytes2Hex(hash.Bytes()))
}


@@ -1,6 +1,10 @@
package types
import (
"encoding/binary"
"errors"
"math"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/core/types"
)
@@ -12,3 +16,46 @@ type WrappedBlock struct {
Transactions []*types.TransactionData `json:"transactions"`
WithdrawTrieRoot common.Hash `json:"withdraw_trie_root,omitempty"`
}
// NumL1Messages returns the number of L1 messages in this block.
// This number is the sum of included and skipped L1 messages.
func (w *WrappedBlock) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 {
var lastQueueIndex *uint64
for _, txData := range w.Transactions {
if txData.Type == 0x7E {
lastQueueIndex = &txData.Nonce
}
}
if lastQueueIndex == nil {
return 0
}
// note: last queue index included before this block is totalL1MessagePoppedBefore - 1
// TODO: cache results
return *lastQueueIndex - totalL1MessagePoppedBefore + 1
}
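// Example (illustrative): if totalL1MessagePoppedBefore is 37 and the last
// L1 message in this block has queue index 41, the result is 41 - 37 + 1 = 5,
// counting both included and skipped messages. This is consistent with the
// blockTrace_05 case in TestNewBatchHeader above.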
// Encode encodes the WrappedBlock into RollupV2 BlockContext Encoding.
func (w *WrappedBlock) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) {
bytes := make([]byte, 60)
if !w.Header.Number.IsUint64() {
return nil, errors.New("block number is not uint64")
}
if len(w.Transactions) > math.MaxUint16 {
return nil, errors.New("number of transactions exceeds max uint16")
}
numL1Messages := w.NumL1Messages(totalL1MessagePoppedBefore)
if numL1Messages > math.MaxUint16 {
return nil, errors.New("number of L1 messages exceeds max uint16")
}
binary.BigEndian.PutUint64(bytes[0:], w.Header.Number.Uint64())
binary.BigEndian.PutUint64(bytes[8:], w.Header.Time)
// TODO: [16:47] Currently, baseFee is 0, because we disable EIP-1559.
binary.BigEndian.PutUint64(bytes[48:], w.Header.GasLimit)
binary.BigEndian.PutUint16(bytes[56:], uint16(len(w.Transactions)))
binary.BigEndian.PutUint16(bytes[58:], uint16(numL1Messages))
return bytes, nil
}
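For reference, `Encode` lays out the 60-byte BlockContext as: block number (8 bytes), timestamp (8 bytes), base fee (32 bytes, currently zero), gas limit (8 bytes), transaction count (2 bytes), and L1 message count (2 bytes), all big-endian. A minimal decoding sketch; `decodeBlockContext` is hypothetical and not part of this change:

```go
package types

import "encoding/binary"

// decodeBlockContext reads back the fixed fields written by
// WrappedBlock.Encode, assuming the 60-byte layout described above.
func decodeBlockContext(ctx [60]byte) (number, timestamp, gasLimit uint64, numTxs, numL1Msgs uint16) {
	number = binary.BigEndian.Uint64(ctx[0:8])
	timestamp = binary.BigEndian.Uint64(ctx[8:16])
	// ctx[16:48] holds the 32-byte base fee, zero while EIP-1559 is disabled.
	gasLimit = binary.BigEndian.Uint64(ctx[48:56])
	numTxs = binary.BigEndian.Uint16(ctx[56:58])
	numL1Msgs = binary.BigEndian.Uint16(ctx[58:60])
	return
}
```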

common/types/chunk.go (new file, 130 lines)

@@ -0,0 +1,130 @@
package types
import (
"encoding/binary"
"encoding/hex"
"errors"
"fmt"
"strings"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/crypto"
)
// Chunk contains blocks to be encoded
type Chunk struct {
Blocks []*WrappedBlock `json:"blocks"`
}
// NumL1Messages returns the number of L1 messages in this chunk.
// This number is the sum of included and skipped L1 messages.
func (c *Chunk) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 {
var numL1Messages uint64
for _, block := range c.Blocks {
numL1MessagesInBlock := block.NumL1Messages(totalL1MessagePoppedBefore)
numL1Messages += numL1MessagesInBlock
totalL1MessagePoppedBefore += numL1MessagesInBlock
}
// TODO: cache results
return numL1Messages
}
// Encode encodes the Chunk into RollupV2 Chunk Encoding.
func (c *Chunk) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) {
numBlocks := len(c.Blocks)
if numBlocks > 255 {
return nil, errors.New("number of blocks exceeds 1 byte")
}
if numBlocks == 0 {
return nil, errors.New("number of blocks is 0")
}
var chunkBytes []byte
chunkBytes = append(chunkBytes, byte(numBlocks))
var l2TxDataBytes []byte
for _, block := range c.Blocks {
blockBytes, err := block.Encode(totalL1MessagePoppedBefore)
if err != nil {
return nil, fmt.Errorf("failed to encode block: %v", err)
}
totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore)
if len(blockBytes) != 60 {
return nil, fmt.Errorf("block encoding is not 60 bytes long %x", len(blockBytes))
}
chunkBytes = append(chunkBytes, blockBytes...)
// Append rlp-encoded l2Txs
for _, txData := range block.Transactions {
if txData.Type == 0x7E {
continue
}
data, _ := hexutil.Decode(txData.Data)
// right now we only support legacy tx
tx := types.NewTx(&types.LegacyTx{
Nonce: txData.Nonce,
To: txData.To,
Value: txData.Value.ToInt(),
Gas: txData.Gas,
GasPrice: txData.GasPrice.ToInt(),
Data: data,
V: txData.V.ToInt(),
R: txData.R.ToInt(),
S: txData.S.ToInt(),
})
rlpTxData, _ := tx.MarshalBinary()
var txLen [4]byte
binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData)))
l2TxDataBytes = append(l2TxDataBytes, txLen[:]...)
l2TxDataBytes = append(l2TxDataBytes, rlpTxData...)
}
}
chunkBytes = append(chunkBytes, l2TxDataBytes...)
return chunkBytes, nil
}
// Hash hashes the Chunk into RollupV2 Chunk Hash
func (c *Chunk) Hash(totalL1MessagePoppedBefore uint64) ([]byte, error) {
chunkBytes, err := c.Encode(totalL1MessagePoppedBefore)
if err != nil {
return nil, err
}
numBlocks := chunkBytes[0]
// concatenate block contexts
var dataBytes []byte
for i := 0; i < int(numBlocks); i++ {
// only the first 58 bytes are needed
dataBytes = append(dataBytes, chunkBytes[1+60*i:60*i+59]...)
}
// concatenate l1 and l2 tx hashes
for _, block := range c.Blocks {
var l1TxHashes []byte
var l2TxHashes []byte
for _, txData := range block.Transactions {
txHash := strings.TrimPrefix(txData.TxHash, "0x")
hashBytes, err := hex.DecodeString(txHash)
if err != nil {
return nil, err
}
if txData.Type == 0x7E {
l1TxHashes = append(l1TxHashes, hashBytes...)
} else {
l2TxHashes = append(l2TxHashes, hashBytes...)
}
}
dataBytes = append(dataBytes, l1TxHashes...)
dataBytes = append(dataBytes, l2TxHashes...)
}
hash := crypto.Keccak256Hash(dataBytes).Bytes()
return hash, nil
}
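Putting the pieces together, a chunk encodes as one byte of `numBlocks`, then `numBlocks` 60-byte block contexts, then the RLP-encoded L2 transactions, each prefixed with a 4-byte big-endian length (L1 messages are omitted from the payload and only enter the chunk hash). A minimal sketch of walking the transaction section; `decodeChunkL2Txs` is hypothetical and not part of this change:

```go
package types

import (
	"encoding/binary"
	"errors"
)

// decodeChunkL2Txs walks the L2 transaction section of a chunk produced by
// Chunk.Encode: a 1-byte block count, numBlocks*60 bytes of block contexts,
// then length-prefixed RLP transactions until the end of the buffer.
func decodeChunkL2Txs(chunkBytes []byte) ([][]byte, error) {
	if len(chunkBytes) == 0 {
		return nil, errors.New("empty chunk")
	}
	numBlocks := int(chunkBytes[0])
	offset := 1 + 60*numBlocks
	if len(chunkBytes) < offset {
		return nil, errors.New("chunk too short for block contexts")
	}
	var txs [][]byte
	for offset < len(chunkBytes) {
		if len(chunkBytes)-offset < 4 {
			return nil, errors.New("truncated tx length prefix")
		}
		txLen := int(binary.BigEndian.Uint32(chunkBytes[offset : offset+4]))
		offset += 4
		if len(chunkBytes)-offset < txLen {
			return nil, errors.New("truncated tx payload")
		}
		txs = append(txs, chunkBytes[offset:offset+txLen])
		offset += txLen
	}
	return txs, nil
}
```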

common/types/chunk_test.go (new file, 140 lines)

@@ -0,0 +1,140 @@
package types
import (
"encoding/hex"
"encoding/json"
"os"
"testing"
"github.com/stretchr/testify/assert"
)
func TestChunkEncode(t *testing.T) {
// Test case 1: when the chunk contains no blocks.
chunk := &Chunk{
Blocks: []*WrappedBlock{},
}
bytes, err := chunk.Encode(0)
assert.Nil(t, bytes)
assert.Error(t, err)
assert.Contains(t, err.Error(), "number of blocks is 0")
// Test case 2: when the chunk contains more than 255 blocks.
chunk = &Chunk{
Blocks: []*WrappedBlock{},
}
for i := 0; i < 256; i++ {
chunk.Blocks = append(chunk.Blocks, &WrappedBlock{})
}
bytes, err = chunk.Encode(0)
assert.Nil(t, bytes)
assert.Error(t, err)
assert.Contains(t, err.Error(), "number of blocks exceeds 1 byte")
// Test case 3: when the chunk contains one block.
templateBlockTrace, err := os.ReadFile("../testdata/blockTrace_02.json")
assert.NoError(t, err)
wrappedBlock := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock))
assert.Equal(t, uint64(0), wrappedBlock.NumL1Messages(0))
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock,
},
}
bytes, err = chunk.Encode(0)
hexString := hex.EncodeToString(bytes)
assert.NoError(t, err)
assert.Equal(t, 299, len(bytes))
assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000000000000355418d1e81840002000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1", hexString)
// Test case 4: when the chunk contains one block with 1 L1MsgTx
templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json")
assert.NoError(t, err)
wrappedBlock2 := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace2, wrappedBlock2))
assert.Equal(t, uint64(11), wrappedBlock2.NumL1Messages(0)) // 0..=9 skipped, 10 included
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock2,
},
}
bytes, err = chunk.Encode(0)
hexString = hex.EncodeToString(bytes)
assert.NoError(t, err)
assert.Equal(t, 97, len(bytes))
assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a12000002000b00000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", hexString)
// Test case 5: when the chunk contains two blocks each with 1 L1MsgTx
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock2,
wrappedBlock2,
},
}
bytes, err = chunk.Encode(0)
hexString = hex.EncodeToString(bytes)
assert.NoError(t, err)
assert.Equal(t, 193, len(bytes))
assert.Equal(t, "02000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a12000002000b000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a12000002000000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", hexString)
}
func TestChunkHash(t *testing.T) {
// Test case 1: when the chunk contains no blocks
chunk := &Chunk{
Blocks: []*WrappedBlock{},
}
bytes, err := chunk.Hash(0)
assert.Nil(t, bytes)
assert.Error(t, err)
assert.Contains(t, err.Error(), "number of blocks is 0")
// Test case 2: successfully hashing a chunk on one block
templateBlockTrace, err := os.ReadFile("../testdata/blockTrace_02.json")
assert.NoError(t, err)
wrappedBlock := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock))
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock,
},
}
bytes, err = chunk.Hash(0)
hexString := hex.EncodeToString(bytes)
assert.NoError(t, err)
assert.Equal(t, "78c839dfc494396c16b40946f32b3f4c3e8c2d4bfd04aefcf235edec474482f8", hexString)
// Test case 3: successfully hashing a chunk on two blocks
templateBlockTrace1, err := os.ReadFile("../testdata/blockTrace_03.json")
assert.NoError(t, err)
wrappedBlock1 := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace1, wrappedBlock1))
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock,
wrappedBlock1,
},
}
bytes, err = chunk.Hash(0)
hexString = hex.EncodeToString(bytes)
assert.NoError(t, err)
assert.Equal(t, "aa9e494f72bc6965857856f0fae6916f27b2a6591c714a573b2fab46df03b8ae", hexString)
// Test case 4: successfully hashing a chunk on two blocks each with L1 and L2 txs
templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json")
assert.NoError(t, err)
wrappedBlock2 := &WrappedBlock{}
assert.NoError(t, json.Unmarshal(templateBlockTrace2, wrappedBlock2))
chunk = &Chunk{
Blocks: []*WrappedBlock{
wrappedBlock2,
wrappedBlock2,
},
}
bytes, err = chunk.Hash(0)
hexString = hex.EncodeToString(bytes)
assert.NoError(t, err)
assert.Equal(t, "42967825696a129e7a83f082097aca982747480956dcaa448c9296e795c9a91a", hexString)
}


@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v3.3.1"
var tag = "v3.3.7"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {


@@ -1,4 +1,5 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
yarn lint-staged && forge build && npx hardhat compile
cd contracts
yarn lint-staged


@@ -2,7 +2,6 @@
Note: For more comprehensive documentation, see [`./docs/`](./docs).
## Directory Structure
```
@@ -24,10 +23,8 @@ remappings.txt - "foundry dependency mappings"
...
```
## Dependencies
### Foundry
First run the command below to get foundryup, the Foundry toolchain installer:
@@ -42,24 +39,22 @@ Then, run `foundryup` in a new terminal session or after reloading your `PATH`.
Other ways to install Foundry can be found [here](https://github.com/foundry-rs/foundry#installation).
### Hardhat
```
yarn install
```
## Build
+ Run `git submodule update --init --recursive` to initialise git submodules.
+ Run `yarn prettier:solidity` to run linting in fix mode; it will auto-format all Solidity code.
+ Run `yarn prettier` to run linting in fix mode; it will auto-format all TypeScript code.
+ Run `forge build` to compile contracts with foundry.
+ Run `npx hardhat compile` to compile with hardhat.
+ Run `forge test -vvv` to run foundry unit tests. It will compile all contracts before running the unit tests.
+ Run `npx hardhat test` to run integration tests. It may not compile all contracts before running, so it's better to run `npx hardhat compile` first.
- Run `git submodule update --init --recursive` to initialise git submodules.
- Run `yarn prettier:solidity` to run linting in fix mode; it will auto-format all Solidity code.
- Run `yarn prettier` to run linting in fix mode; it will auto-format all TypeScript code.
- Run `yarn prepare` to install the precommit linting hook.
- Run `forge build` to compile contracts with foundry.
- Run `npx hardhat compile` to compile with hardhat.
- Run `forge test -vvv` to run foundry unit tests. It will compile all contracts before running the unit tests.
- Run `npx hardhat test` to run integration tests. It may not compile all contracts before running, so it's better to run `npx hardhat compile` first.
## TODO


@@ -63,28 +63,6 @@ Initialize the storage of L1ScrollMessenger.
| _rollup | address | The address of ScrollChain contract. |
| _messageQueue | address | The address of L1MessageQueue contract. |
### isL1MessageRelayed
```solidity
function isL1MessageRelayed(bytes32) external view returns (bool)
```
Mapping from relay id to relay status.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _0 | bytes32 | undefined |
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | bool | undefined |
### isL1MessageSent
```solidity


@@ -2,7 +2,16 @@
/* eslint-disable node/no-missing-import */
import { expect } from "chai";
import { BigNumber, constants } from "ethers";
import { concat, getAddress, hexlify, keccak256, randomBytes, RLP } from "ethers/lib/utils";
import {
concat,
getAddress,
hexlify,
keccak256,
randomBytes,
RLP,
stripZeros,
TransactionTypes,
} from "ethers/lib/utils";
import { ethers } from "hardhat";
import { L1MessageQueue, L2GasPriceOracle } from "../typechain";
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
@@ -94,8 +103,8 @@ describe("L1MessageQueue", async () => {
context("#computeTransactionHash", async () => {
it("should succeed", async () => {
const sender = hexlify(randomBytes(20));
const target = hexlify(randomBytes(20));
const sender = "0xb2a70fab1a45b1b9be443b6567849a1702bc1232";
const target = "0xcb18150e4efefb6786130e289a5f61a82a5b86d7";
const transactionType = "0x7E";
for (const nonce of [
@@ -123,19 +132,30 @@ describe("L1MessageQueue", async () => {
constants.MaxUint256,
]) {
for (const dataLen of [0, 1, 2, 3, 4, 55, 56, 100]) {
const data = randomBytes(dataLen);
const transactionPayload = RLP.encode([
nonce.toHexString(),
gasLimit.toHexString(),
target,
value.toHexString(),
data,
sender,
]);
const payload = concat([transactionType, transactionPayload]);
const expectedHash = keccak256(payload);
const computedHash = await queue.computeTransactionHash(sender, nonce, value, target, gasLimit, data);
expect(expectedHash).to.eq(computedHash);
const tests = [randomBytes(dataLen)];
if (dataLen === 1) {
for (const byte of [0, 1, 127, 128]) {
tests.push(Uint8Array.from([byte]));
}
}
for (const data of tests) {
const transactionPayload = RLP.encode([
stripZeros(nonce.toHexString()),
stripZeros(gasLimit.toHexString()),
target,
stripZeros(value.toHexString()),
data,
sender,
]);
const payload = concat([transactionType, transactionPayload]);
const expectedHash = keccak256(payload);
const computedHash = await queue.computeTransactionHash(sender, nonce, value, target, gasLimit, data);
if (computedHash !== expectedHash) {
console.log(hexlify(transactionPayload));
console.log(nonce, gasLimit, target, value, data, sender);
}
expect(expectedHash).to.eq(computedHash);
}
}
}
}


@@ -11,7 +11,8 @@
"lint:sol": "./node_modules/.bin/prettier --write 'src/**/*.sol'",
"lint:ts": "./node_modules/.bin/prettier --write 'integration-test/**/*.ts' 'scripts/**/*.ts' *.ts",
"lint": "yarn lint:ts && yarn lint:sol",
"coverage": "hardhat coverage"
"coverage": "hardhat coverage",
"prepare": "cd .. && husky install contracts/.husky"
},
"devDependencies": {
"@nomiclabs/hardhat-ethers": "^2.0.0",


@@ -0,0 +1,27 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {Fallback} from "../../src/misc/Fallback.sol";
contract DeployFallbackContracts is Script {
uint256 DEPLOYER_PRIVATE_KEY = vm.envUint("DEPLOYER_PRIVATE_KEY");
uint256 NUM_CONTRACTS = vm.envUint("NUM_CONTRACTS");
function run() external {
vm.startBroadcast(DEPLOYER_PRIVATE_KEY);
for (uint256 ii = 0; ii < NUM_CONTRACTS; ++ii) {
Fallback fallbackContract = new Fallback();
logAddress("FALLBACK", address(fallbackContract));
}
vm.stopBroadcast();
}
function logAddress(string memory name, address addr) internal view {
console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
}
}


@@ -19,7 +19,7 @@ async function main() {
console.log("Using rollup proxy address:", rollupAddr);
const ScrollChain = await ethers.getContractAt("ScrollChain", rollupAddr, deployer);
const genesis = JSON.parse(fs.readFileSync(GENESIS_FILE_PATH, 'utf8'));
const genesis = JSON.parse(fs.readFileSync(GENESIS_FILE_PATH, "utf8"));
console.log("Using genesis block:", genesis.blockHash);
const tx = await ScrollChain.importGenesisBatch(genesis);


@@ -26,16 +26,16 @@ async function main() {
const L2StandardERC20FactoryAddress = process.env.L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR!;
// if ((await L1StandardERC20Gateway.counterpart()) === constants.AddressZero) {
const tx = await L1StandardERC20Gateway.initialize(
L2StandardERC20GatewayAddress,
L1GatewayRouterAddress,
L1ScrollMessengerAddress,
L2StandardERC20Impl,
L2StandardERC20FactoryAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await L1StandardERC20Gateway.initialize(
L2StandardERC20GatewayAddress,
L1GatewayRouterAddress,
L1ScrollMessengerAddress,
L2StandardERC20Impl,
L2StandardERC20FactoryAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}


@@ -24,14 +24,14 @@ async function main() {
const L2GatewayRouterAddress = process.env.L2_GATEWAY_ROUTER_PROXY_ADDR!;
// if ((await L1GatewayRouter.counterpart()) === constants.AddressZero) {
const tx = await L1GatewayRouter.initialize(
L1StandardERC20GatewayAddress,
L2GatewayRouterAddress,
L1ScrollMessengerAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await L1GatewayRouter.initialize(
L1StandardERC20GatewayAddress,
L2GatewayRouterAddress,
L1ScrollMessengerAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}


@@ -22,10 +22,10 @@ async function main() {
const ZKRollupAddress = addressFile.get("ZKRollup.proxy");
// if ((await L1ScrollMessenger.rollup()) === constants.AddressZero) {
const tx = await L1ScrollMessenger.initialize(ZKRollupAddress);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await L1ScrollMessenger.initialize(ZKRollupAddress);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}


@@ -25,15 +25,15 @@ async function main() {
const L1StandardERC20GatewayAddress = process.env.L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR!;
// if ((await L2StandardERC20Gateway.counterpart()) === constants.AddressZero) {
const tx = await L2StandardERC20Gateway.initialize(
L1StandardERC20GatewayAddress,
L2GatewayRouterAddress,
L2ScrollMessengerAddress,
L2StandardERC20FactoryAddress
);
console.log("initialize L2StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await L2StandardERC20Gateway.initialize(
L1StandardERC20GatewayAddress,
L2GatewayRouterAddress,
L2ScrollMessengerAddress,
L2StandardERC20FactoryAddress
);
console.log("initialize L2StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}


@@ -24,14 +24,14 @@ async function main() {
const L1GatewayRouterAddress = process.env.L1_GATEWAY_ROUTER_PROXY_ADDR!;
// if ((await L2GatewayRouter.counterpart()) === constants.AddressZero) {
const tx = await L2GatewayRouter.initialize(
L2StandardERC20GatewayAddress,
L1GatewayRouterAddress,
L2ScrollMessengerAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await L2GatewayRouter.initialize(
L2StandardERC20GatewayAddress,
L1GatewayRouterAddress,
L2ScrollMessengerAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}


@@ -21,10 +21,10 @@ async function main() {
const L2StandardERC20GatewayAddress = addressFile.get("L2StandardERC20Gateway.proxy");
// if ((await ScrollStandardERC20Factory.owner()) !== L2StandardERC20GatewayAddress) {
const tx = await ScrollStandardERC20Factory.transferOwnership(L2StandardERC20GatewayAddress);
console.log("transfer ownernship ScrollStandardERC20Factory, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await ScrollStandardERC20Factory.transferOwnership(L2StandardERC20GatewayAddress);
console.log("transfer ownernship ScrollStandardERC20Factory, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}


@@ -10,9 +10,7 @@ export function selectAddressFile(network: string) {
}
let filename: string;
if (["hardhat",
"l1geth", "l2geth",
].includes(network)) {
if (["hardhat", "l1geth", "l2geth"].includes(network)) {
filename = path.join(CONFIG_FILE_DIR, `${network}.json`);
} else {
throw new Error(`network ${network} not supported yet`);


@@ -30,9 +30,6 @@ contract L1ScrollMessenger is PausableUpgradeable, ScrollMessengerBase, IL1Scrol
* Variables *
*************/
/// @notice Mapping from relay id to relay status.
mapping(bytes32 => bool) public isL1MessageRelayed;
/// @notice Mapping from L1 message hash to sent status.
mapping(bytes32 => bool) public isL1MessageSent;
@@ -45,28 +42,6 @@ contract L1ScrollMessenger is PausableUpgradeable, ScrollMessengerBase, IL1Scrol
/// @notice The address of L1MessageQueue contract.
address public messageQueue;
// @note move to ScrollMessengerBase in next big refactor
/// @dev The status for the non-reentrant check.
uint256 private _lock_status;
/**********************
* Function Modifiers *
**********************/
modifier nonReentrant() {
// On the first call to nonReentrant, _notEntered will be true
require(_lock_status != _ENTERED, "ReentrancyGuard: reentrant call");
// Any calls to nonReentrant after this point will fail
_lock_status = _ENTERED;
_;
// By storing the original value once again, a refund is triggered (see
// https://eips.ethereum.org/EIPS/eip-2200)
_lock_status = _NOT_ENTERED;
}
/***************
* Constructor *
***************/
@@ -162,9 +137,6 @@ contract L1ScrollMessenger is PausableUpgradeable, ScrollMessengerBase, IL1Scrol
} else {
emit FailedRelayedMessage(_xDomainCalldataHash);
}
bytes32 _relayId = keccak256(abi.encodePacked(_xDomainCalldataHash, msg.sender, block.number));
isL1MessageRelayed[_relayId] = true;
}
/// @inheritdoc IL1ScrollMessenger


@@ -22,4 +22,32 @@ interface IL1GatewayRouter is IL1ETHGateway, IL1ERC20Gateway {
/// @param token The address of token updated.
/// @param gateway The corresponding address of gateway updated.
event SetERC20Gateway(address indexed token, address indexed gateway);
/*************************
* Public View Functions *
*************************/
/// @notice Return the corresponding gateway address for given token address.
/// @param _token The address of token to query.
function getERC20Gateway(address _token) external view returns (address);
/************************
* Restricted Functions *
************************/
/// @notice Update the address of ETH gateway contract.
/// @dev This function should only be called by contract owner.
/// @param _ethGateway The address to update.
function setETHGateway(address _ethGateway) external;
/// @notice Update the address of default ERC20 gateway contract.
/// @dev This function should only be called by contract owner.
/// @param _defaultERC20Gateway The address to update.
function setDefaultERC20Gateway(address _defaultERC20Gateway) external;
/// @notice Update the mapping from token address to gateway address.
/// @dev This function should only be called by contract owner.
/// @param _tokens The list of addresses of tokens to update.
/// @param _gateways The list of addresses of gateways to update.
function setERC20Gateway(address[] memory _tokens, address[] memory _gateways) external;
}


@@ -68,8 +68,7 @@ contract L1GatewayRouter is OwnableUpgradeable, IL1GatewayRouter {
return IL1ERC20Gateway(_gateway).getL2ERC20Address(_l1Address);
}
/// @notice Return the corresponding gateway address for given token address.
/// @param _token The address of token to query.
/// @inheritdoc IL1GatewayRouter
function getERC20Gateway(address _token) public view returns (address) {
address _gateway = ERC20Gateway[_token];
if (_gateway == address(0)) {
@@ -178,28 +177,21 @@ contract L1GatewayRouter is OwnableUpgradeable, IL1GatewayRouter {
* Restricted Functions *
************************/
/// @notice Update the address of ETH gateway contract.
/// @dev This function should only be called by contract owner.
/// @param _ethGateway The address to update.
/// @inheritdoc IL1GatewayRouter
function setETHGateway(address _ethGateway) external onlyOwner {
ethGateway = _ethGateway;
emit SetETHGateway(_ethGateway);
}
/// @notice Update the address of default ERC20 gateway contract.
/// @dev This function should only be called by contract owner.
/// @param _defaultERC20Gateway The address to update.
/// @inheritdoc IL1GatewayRouter
function setDefaultERC20Gateway(address _defaultERC20Gateway) external onlyOwner {
defaultERC20Gateway = _defaultERC20Gateway;
emit SetDefaultERC20Gateway(_defaultERC20Gateway);
}
/// @notice Update the mapping from token address to gateway address.
/// @dev This function should only be called by contract owner.
/// @param _tokens The list of addresses of tokens to update.
/// @param _gateways The list of addresses of gateways to update.
/// @inheritdoc IL1GatewayRouter
function setERC20Gateway(address[] memory _tokens, address[] memory _gateways) external onlyOwner {
require(_tokens.length == _gateways.length, "length mismatch");


@@ -139,18 +139,27 @@ contract L1MessageQueue is OwnableUpgradeable, IL1MessageQueue {
}
}
function store_uint(_ptr, v) -> ptr {
// This is used to store both uints and single bytes.
// Integer zero is specially handled by geth and encodes as `0x80`.
function store_uint_or_byte(_ptr, v, is_uint) -> ptr {
ptr := _ptr
switch lt(v, 128)
case 1 {
// single byte in the [0x00, 0x7f]
mstore(ptr, shl(248, v))
switch and(iszero(v), is_uint)
case 1 {
// integer 0
mstore8(ptr, 0x80)
}
default {
// single byte in the [0x00, 0x7f]
mstore8(ptr, v)
}
ptr := add(ptr, 1)
}
default {
// 1-32 bytes long
let len := get_uint_bytes(v)
mstore(ptr, shl(248, add(len, 0x80)))
mstore8(ptr, add(len, 0x80))
ptr := add(ptr, 1)
mstore(ptr, shl(mul(8, sub(32, len)), v))
ptr := add(ptr, len)
@@ -160,7 +169,7 @@ contract L1MessageQueue is OwnableUpgradeable, IL1MessageQueue {
function store_address(_ptr, v) -> ptr {
ptr := _ptr
// 20 bytes long
mstore(ptr, shl(248, 0x94)) // 0x80 + 0x14
mstore8(ptr, 0x94) // 0x80 + 0x14
ptr := add(ptr, 1)
mstore(ptr, shl(96, v))
ptr := add(ptr, 0x14)
@@ -170,21 +179,21 @@ contract L1MessageQueue is OwnableUpgradeable, IL1MessageQueue {
// 4 byte for list payload length
let start_ptr := add(mload(0x40), 5)
let ptr := start_ptr
ptr := store_uint(ptr, _queueIndex)
ptr := store_uint(ptr, _gasLimit)
ptr := store_uint_or_byte(ptr, _queueIndex, 1)
ptr := store_uint_or_byte(ptr, _gasLimit, 1)
ptr := store_address(ptr, _target)
ptr := store_uint(ptr, _value)
ptr := store_uint_or_byte(ptr, _value, 1)
switch eq(_data.length, 1)
case 1 {
// single byte
ptr := store_uint(ptr, shr(248, calldataload(_data.offset)))
ptr := store_uint_or_byte(ptr, byte(0, calldataload(_data.offset)), 0)
}
default {
switch lt(_data.length, 56)
case 1 {
// a string is 0-55 bytes long
mstore(ptr, shl(248, add(0x80, _data.length)))
mstore8(ptr, add(0x80, _data.length))
ptr := add(ptr, 1)
calldatacopy(ptr, _data.offset, _data.length)
ptr := add(ptr, _data.length)
@@ -192,7 +201,7 @@ contract L1MessageQueue is OwnableUpgradeable, IL1MessageQueue {
default {
// a string is more than 55 bytes long
let len_bytes := get_uint_bytes(_data.length)
mstore(ptr, shl(248, add(0xb7, len_bytes)))
mstore8(ptr, add(0xb7, len_bytes))
ptr := add(ptr, 1)
mstore(ptr, shl(mul(8, sub(32, len_bytes)), _data.length))
ptr := add(ptr, len_bytes)
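The rewritten assembly follows the standard RLP rules for integers: zero encodes as `0x80`, a single byte below `0x80` encodes as itself, and larger values get a `0x80 + len` prefix followed by the big-endian bytes without leading zeros. This is exactly the discrepancy the `stripZeros` change in the TypeScript test earlier in this diff guards against. A minimal Go illustration of the integer rule (not part of this change):

```go
package main

import "fmt"

// rlpEncodeUint shows the RLP rule that store_uint_or_byte implements for
// integers: 0 -> 0x80, a single byte below 0x80 -> itself, otherwise a
// 0x80+len prefix followed by big-endian bytes without leading zeros.
func rlpEncodeUint(v uint64) []byte {
	switch {
	case v == 0:
		return []byte{0x80}
	case v < 0x80:
		return []byte{byte(v)}
	default:
		var buf []byte
		for x := v; x > 0; x >>= 8 {
			buf = append([]byte{byte(x)}, buf...)
		}
		return append([]byte{0x80 + byte(len(buf))}, buf...)
	}
}

func main() {
	fmt.Printf("%x %x %x\n", rlpEncodeUint(0), rlpEncodeUint(0x7f), rlpEncodeUint(0x0400))
	// Output: 80 7f 820400
}
```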


@@ -0,0 +1,118 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import {Ownable} from "@openzeppelin/contracts/access/Ownable.sol";
import {IRollupVerifier} from "../../libraries/verifier/IRollupVerifier.sol";
import {IZkEvmVerifier} from "../../libraries/verifier/IZkEvmVerifier.sol";
contract MultipleVersionRollupVerifier is IRollupVerifier, Ownable {
/**********
* Events *
**********/
/// @notice Emitted when the address of verifier is updated.
/// @param startBatchIndex The start batch index when the verifier will be used.
/// @param verifier The address of new verifier.
event UpdateVerifier(uint256 startBatchIndex, address verifier);
/***********
* Structs *
***********/
struct Verifier {
// The start batch index for the verifier.
uint64 startBatchIndex;
// The address of zkevm verifier.
address verifier;
}
/*************
* Variables *
*************/
/// @notice The list of legacy zkevm verifiers, sorted by batchIndex in increasing order.
Verifier[] public legacyVerifiers;
/// @notice The latest used zkevm verifier.
Verifier public latestVerifier;
/***************
* Constructor *
***************/
constructor(address _verifier) {
require(_verifier != address(0), "zero verifier address");
latestVerifier.verifier = _verifier;
}
/*************************
* Public View Functions *
*************************/
/// @notice Return the number of legacy verifiers.
function legacyVerifiersLength() external view returns (uint256) {
return legacyVerifiers.length;
}
/// @notice Compute the verifier that should be used for a specific batch.
/// @param _batchIndex The batch index to query.
function getVerifier(uint256 _batchIndex) public view returns (address) {
// Normally, we will use the latest verifier.
Verifier memory _verifier = latestVerifier;
if (_verifier.startBatchIndex > _batchIndex) {
uint256 _length = legacyVerifiers.length;
// In most cases, only the last few verifiers will be used by `ScrollChain`.
// So, we use linear search instead of binary search.
unchecked {
for (uint256 i = _length; i > 0; --i) {
_verifier = legacyVerifiers[i - 1];
if (_verifier.startBatchIndex <= _batchIndex) break;
}
}
}
return _verifier.verifier;
}
/*****************************
* Public Mutating Functions *
*****************************/
/// @inheritdoc IRollupVerifier
function verifyAggregateProof(
uint256 _batchIndex,
bytes calldata _aggrProof,
bytes32 _publicInputHash
) external view override {
address _verifier = getVerifier(_batchIndex);
IZkEvmVerifier(_verifier).verify(_aggrProof, _publicInputHash);
}
/************************
* Restricted Functions *
************************/
/// @notice Update the address of zkevm verifier.
/// @param _startBatchIndex The start batch index when the verifier will be used.
/// @param _verifier The address of new verifier.
function updateVerifier(uint64 _startBatchIndex, address _verifier) external onlyOwner {
Verifier memory _latestVerifier = latestVerifier;
require(_startBatchIndex >= _latestVerifier.startBatchIndex, "start batch index too small");
require(_verifier != address(0), "zero verifier address");
if (_latestVerifier.startBatchIndex < _startBatchIndex) {
legacyVerifiers.push(_latestVerifier);
_latestVerifier.startBatchIndex = _startBatchIndex;
}
_latestVerifier.verifier = _verifier;
latestVerifier = _latestVerifier;
emit UpdateVerifier(_startBatchIndex, _verifier);
}
}
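To make the lookup rule concrete: the latest verifier serves every batch at or after its `startBatchIndex`, and older batches fall back to the legacy list, scanned backwards. A small Go model of the same selection logic (illustration only, not part of this change):

```go
package main

import "fmt"

type verifier struct {
	startBatchIndex uint64
	addr            string
}

// getVerifier mirrors MultipleVersionRollupVerifier.getVerifier: use the
// latest verifier unless its start index is beyond the queried batch, in
// which case walk the legacy list backwards to the newest matching entry.
func getVerifier(legacy []verifier, latest verifier, batchIndex uint64) string {
	v := latest
	if v.startBatchIndex > batchIndex {
		for i := len(legacy); i > 0; i-- {
			v = legacy[i-1]
			if v.startBatchIndex <= batchIndex {
				break
			}
		}
	}
	return v.addr
}

func main() {
	legacy := []verifier{{0, "v1"}, {100, "v2"}}
	latest := verifier{200, "v3"}
	fmt.Println(getVerifier(legacy, latest, 50))  // v1
	fmt.Println(getVerifier(legacy, latest, 150)) // v2
	fmt.Println(getVerifier(legacy, latest, 500)) // v3
}
```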


@@ -25,6 +25,11 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
/// @param status The status of the account updated.
event UpdateSequencer(address indexed account, bool status);
/// @notice Emitted when owner updates the status of prover.
/// @param account The address of account updated.
/// @param status The status of the account updated.
event UpdateProver(address indexed account, bool status);
/// @notice Emitted when the address of rollup verifier is updated.
/// @param oldVerifier The address of old rollup verifier.
/// @param newVerifier The address of new rollup verifier.
@@ -58,6 +63,9 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
/// @notice Whether an account is a sequencer.
mapping(address => bool) public isSequencer;
/// @notice Whether an account is a prover.
mapping(address => bool) public isProver;
/// @notice The latest finalized batch index.
uint256 public lastFinalizedBatchIndex;
@@ -80,6 +88,11 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
_;
}
modifier OnlyProver() {
require(isProver[msg.sender], "caller not prover");
_;
}
/***************
* Constructor *
***************/
@@ -117,12 +130,7 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
*****************************/
/// @notice Import layer 2 genesis block
/// @dev Although `_withdrawRoot` is always zero, we add this parameter for the convenience of unit testing.
function importGenesisBatch(
bytes calldata _batchHeader,
bytes32 _stateRoot,
bytes32 _withdrawRoot
) external {
function importGenesisBatch(bytes calldata _batchHeader, bytes32 _stateRoot) external {
// check genesis batch header length
require(_stateRoot != bytes32(0), "zero state root");
@@ -144,10 +152,9 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
committedBatches[0] = _batchHash;
finalizedStateRoots[0] = _stateRoot;
withdrawRoots[0] = _withdrawRoot;
emit CommitBatch(_batchHash);
emit FinalizeBatch(_batchHash, _stateRoot, _withdrawRoot);
emit FinalizeBatch(_batchHash, _stateRoot, bytes32(0));
}
/// @inheritdoc IScrollChain
@@ -278,7 +285,7 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
bytes32 _postStateRoot,
bytes32 _withdrawRoot,
bytes calldata _aggrProof
) external override OnlySequencer {
) external override OnlyProver {
require(_prevStateRoot != bytes32(0), "previous state root is zero");
require(_postStateRoot != bytes32(0), "new state root is zero");
@@ -301,7 +308,7 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
);
// verify batch
IRollupVerifier(verifier).verifyAggregateProof(_aggrProof, _publicInputHash);
IRollupVerifier(verifier).verifyAggregateProof(_batchIndex, _aggrProof, _publicInputHash);
// check and update lastFinalizedBatchIndex
unchecked {
@@ -352,6 +359,16 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
emit UpdateSequencer(_account, _status);
}
/// @notice Update the status of prover.
/// @dev This function can only be called by the contract owner.
/// @param _account The address of account to update.
/// @param _status The status of the account to update.
function updateProver(address _account, bool _status) external onlyOwner {
isProver[_account] = _status;
emit UpdateProver(_account, _status);
}
/// @notice Update the address verifier contract.
/// @param _newVerifier The address of new verifier contract.
function updateVerifier(address _newVerifier) external onlyOwner {
@@ -402,11 +419,13 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
bytes calldata _skippedL1MessageBitmap
) internal view returns (uint256 _totalNumL1MessagesInChunk) {
uint256 chunkPtr;
uint256 startDataPtr;
uint256 dataPtr;
uint256 blockPtr;
assembly {
dataPtr := mload(0x40)
startDataPtr := dataPtr
chunkPtr := add(_chunk, 0x20) // skip chunkLength
blockPtr := add(chunkPtr, 1) // skip numBlocks
}
@@ -414,15 +433,23 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
uint256 _numBlocks = ChunkCodec.validateChunkLength(chunkPtr, _chunk.length);
// concatenate block contexts
uint256 _totalTransactionsInChunk;
for (uint256 i = 0; i < _numBlocks; i++) {
dataPtr = ChunkCodec.copyBlockContext(chunkPtr, dataPtr, i);
uint256 _numTransactionsInBlock = ChunkCodec.numTransactions(blockPtr);
unchecked {
_totalTransactionsInChunk += _numTransactionsInBlock;
blockPtr += ChunkCodec.BLOCK_CONTEXT_LENGTH;
}
}
assembly {
mstore(0x40, add(dataPtr, mul(_totalTransactionsInChunk, 0x20))) // reserve memory for tx hashes
blockPtr := add(chunkPtr, 1) // reset block ptr
}
// concatenate tx hashes
uint256 l2TxPtr = ChunkCodec.l2TxPtr(chunkPtr, _numBlocks);
// avoid stack too deep on forge coverage
uint256 _totalTransactionsInChunk;
while (_numBlocks > 0) {
// concatenate l1 message hashes
uint256 _numL1MessagesInBlock = ChunkCodec.numL1Messages(blockPtr);
@@ -446,7 +473,6 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
}
unchecked {
_totalTransactionsInChunk += _numTransactionsInBlock;
_totalNumL1MessagesInChunk += _numL1MessagesInBlock;
_totalL1MessagesPoppedInBatch += _numL1MessagesInBlock;
_totalL1MessagesPoppedOverall += _numL1MessagesInBlock;
@@ -467,9 +493,7 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
// compute data hash and store to memory
assembly {
let startPtr := mload(0x40)
let dataHash := keccak256(startPtr, sub(dataPtr, startPtr))
let dataHash := keccak256(startDataPtr, sub(dataPtr, startDataPtr))
mstore(memPtr, dataHash)
}


@@ -62,28 +62,6 @@ contract L2ScrollMessenger is ScrollMessengerBase, PausableUpgradeable, IL2Scrol
/// @notice The maximum number of times each L1 message can fail on L2.
uint256 public maxFailedExecutionTimes;
// @note move to ScrollMessengerBase in next big refactor
/// @dev The status for the non-reentrant check.
uint256 private _lock_status;
/**********************
* Function Modifiers *
**********************/
modifier nonReentrant() {
// On the first call to nonReentrant, _notEntered will be true
require(_lock_status != _ENTERED, "ReentrancyGuard: reentrant call");
// Any calls to nonReentrant after this point will fail
_lock_status = _ENTERED;
_;
// By storing the original value once again, a refund is triggered (see
// https://eips.ethereum.org/EIPS/eip-2200)
_lock_status = _NOT_ENTERED;
}
/***************
* Constructor *
***************/
@@ -126,14 +104,16 @@ contract L2ScrollMessenger is ScrollMessengerBase, PausableUpgradeable, IL2Scrol
require(_expectedStateRoot != bytes32(0), "Block is not imported");
bytes32 _storageKey;
// `mapping(bytes32 => bool) public isL1MessageSent` is the 105-nd slot of contract `L1ScrollMessenger`.
// `mapping(bytes32 => bool) public isL1MessageSent` is the 155-th slot of contract `L1ScrollMessenger`.
// + 1 from `Initializable`
// + 50 from `OwnableUpgradeable`
// + 50 from `ContextUpgradeable`
// + 4 from `ScrollMessengerBase`
// + 50 from `PausableUpgradeable`
// + 2-nd in `L1ScrollMessenger`
// + 1-st in `L1ScrollMessenger`
assembly {
mstore(0x00, _msgHash)
mstore(0x20, 105)
mstore(0x20, 155)
_storageKey := keccak256(0x00, 0x40)
}
@@ -161,14 +141,16 @@ contract L2ScrollMessenger is ScrollMessengerBase, PausableUpgradeable, IL2Scrol
require(_expectedStateRoot != bytes32(0), "Block not imported");
bytes32 _storageKey;
// `mapping(bytes32 => bool) public isL2MessageExecuted` is the 106-th slot of contract `L1ScrollMessenger`.
// `mapping(bytes32 => bool) public isL2MessageExecuted` is the 156-th slot of contract `L1ScrollMessenger`.
// + 1 from `Initializable`
// + 50 from `OwnableUpgradeable`
// + 50 from `ContextUpgradeable`
// + 4 from `ScrollMessengerBase`
// + 50 from `PausableUpgradeable`
// + 3-rd in `L1ScrollMessenger`
// + 2-nd in `L1ScrollMessenger`
assembly {
mstore(0x00, _msgHash)
mstore(0x20, 106)
mstore(0x20, 156)
_storageKey := keccak256(0x00, 0x40)
}
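Both assembly blocks above are the standard Solidity mapping-slot derivation: for a mapping declared at slot p, the value for key k lives at keccak256(abi.encode(k, p)), and the 155 here is the 1 + 50 + 50 + 4 + 50 inherited slots enumerated in the comments. An equivalent high-level sketch (helper name hypothetical):

function mappingSlot(bytes32 key, uint256 slot) internal pure returns (bytes32) {
    // identical to: mstore(0x00, key); mstore(0x20, slot); keccak256(0x00, 0x40)
    return keccak256(abi.encode(key, slot));
}

// e.g. isL1MessageSent[_msgHash] lives at mappingSlot(_msgHash, 155)
//      and isL2MessageExecuted[_msgHash] at mappingSlot(_msgHash, 156)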

View File

@@ -51,7 +51,8 @@ contract WETH9 {
balanceOf[msg.sender] -= wad;
}
payable(msg.sender).transfer(wad);
(bool success, ) = msg.sender.call{value:wad}("");
require(success, "withdraw ETH failed");
emit Withdrawal(msg.sender, wad);
}
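The replacement above swaps transfer (hard-coded 2300 gas stipend, which reverts for smart-contract recipients whose receive hooks need more gas) for a low-level call with an explicit success check. A minimal sketch of the resulting pattern; note the balance is debited before the external call, so a reentrant withdraw cannot double-spend:

function withdraw(uint256 wad) external {
    require(balanceOf[msg.sender] >= wad, "insufficient balance");
    balanceOf[msg.sender] -= wad;                         // effects first
    (bool success, ) = msg.sender.call{value: wad}("");   // interaction last
    require(success, "withdraw ETH failed");
    emit Withdrawal(msg.sender, wad);
}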

View File

@@ -38,6 +38,28 @@ abstract contract ScrollMessengerBase is OwnableUpgradeable, IScrollMessenger {
/// @notice The address of fee vault, collecting cross domain messaging fee.
address public feeVault;
// @note move to ScrollMessengerBase in next big refactor
/// @dev The status of the non-reentrant check.
uint256 private _lock_status;
/**********************
* Function Modifiers *
**********************/
modifier nonReentrant() {
// On the first call to nonReentrant, _lock_status will be _NOT_ENTERED
require(_lock_status != _ENTERED, "ReentrancyGuard: reentrant call");
// Any calls to nonReentrant after this point will fail
_lock_status = _ENTERED;
_;
// By storing the original value once again, a refund is triggered (see
// https://eips.ethereum.org/EIPS/eip-2200)
_lock_status = _NOT_ENTERED;
}
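The guard assumes _ENTERED and _NOT_ENTERED constants in scope; by the usual OpenZeppelin convention both are nonzero so that restoring the slot after the call triggers the EIP-2200 refund mentioned in the comment. A plausible declaration (not shown in this diff):

uint256 private constant _NOT_ENTERED = 1;
uint256 private constant _ENTERED = 2;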
/***************
* Constructor *
***************/

View File

@@ -4,7 +4,12 @@ pragma solidity ^0.8.0;
interface IRollupVerifier {
/// @notice Verify aggregate zk proof.
/// @param batchIndex The batch index to verify.
/// @param aggrProof The aggregated proof.
/// @param publicInputHash The public input hash.
function verifyAggregateProof(bytes calldata aggrProof, bytes32 publicInputHash) external view;
function verifyAggregateProof(
uint256 batchIndex,
bytes calldata aggrProof,
bytes32 publicInputHash
) external view;
}
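With batchIndex threaded through, a caller can route each proof to the verifier version registered for that batch. A hedged sketch of the call site (function and variable names assumed, not taken from this diff):

function _verifyProof(uint256 _batchIndex, bytes calldata _aggrProof, bytes32 _publicInputHash) internal view {
    // `verifier` holds the IRollupVerifier (e.g. MultipleVersionRollupVerifier) address
    IRollupVerifier(verifier).verifyAggregateProof(_batchIndex, _aggrProof, _publicInputHash);
}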

View File

@@ -0,0 +1,10 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
interface IZkEvmVerifier {
/// @notice Verify aggregate zk proof.
/// @param aggrProof The aggregated proof.
/// @param publicInputHash The public input hash.
function verify(bytes calldata aggrProof, bytes32 publicInputHash) external view;
}

View File

@@ -12,6 +12,10 @@ import {Whitelist} from "../L2/predeploys/Whitelist.sol";
import {L1ScrollMessenger} from "../L1/L1ScrollMessenger.sol";
import {L2ScrollMessenger} from "../L2/L2ScrollMessenger.sol";
import {MockRollupVerifier} from "./mocks/MockRollupVerifier.sol";
// solhint-disable no-inline-assembly
abstract contract L1GatewayTestBase is DSTestPlus {
// from L1MessageQueue
event QueueTransaction(
@@ -44,6 +48,8 @@ abstract contract L1GatewayTestBase is DSTestPlus {
EnforcedTxGateway internal enforcedTxGateway;
ScrollChain internal rollup;
MockRollupVerifier internal verifier;
address internal feeVault;
Whitelist private whitelist;
@@ -59,6 +65,7 @@ abstract contract L1GatewayTestBase is DSTestPlus {
rollup = new ScrollChain(1233);
enforcedTxGateway = new EnforcedTxGateway();
whitelist = new Whitelist(address(this));
verifier = new MockRollupVerifier();
// Deploy L2 contracts
l2Messenger = new L2ScrollMessenger(address(0), address(0), address(0));
@@ -74,7 +81,7 @@ abstract contract L1GatewayTestBase is DSTestPlus {
);
gasOracle.initialize(0, 0, 0, 0);
gasOracle.updateWhitelist(address(whitelist));
rollup.initialize(address(messageQueue), address(0), 44);
rollup.initialize(address(messageQueue), address(verifier), 44);
address[] memory _accounts = new address[](1);
_accounts[0] = address(this);
@@ -82,11 +89,40 @@ abstract contract L1GatewayTestBase is DSTestPlus {
}
function prepareL2MessageRoot(bytes32 messageHash) internal {
bytes memory _batchHeader = new bytes(89);
rollup.updateSequencer(address(this), true);
rollup.updateProver(address(this), true);
// import genesis batch
bytes memory batchHeader0 = new bytes(89);
assembly {
mstore(add(_batchHeader, add(0x20, 25)), 1)
mstore(add(batchHeader0, add(0x20, 25)), 1)
}
rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1)));
bytes32 batchHash0 = rollup.committedBatches(0);
// commit one batch
bytes[] memory chunks = new bytes[](1);
bytes memory chunk0 = new bytes(1 + 60);
chunk0[0] = bytes1(uint8(1)); // one block in this chunk
chunks[0] = chunk0;
rollup.commitBatch(0, batchHeader0, chunks, new bytes(0));
bytes memory batchHeader1 = new bytes(89);
assembly {
mstore(add(batchHeader1, 0x20), 0) // version
mstore(add(batchHeader1, add(0x20, 1)), shl(192, 1)) // batchIndex
mstore(add(batchHeader1, add(0x20, 9)), 0) // l1MessagePopped
mstore(add(batchHeader1, add(0x20, 17)), 0) // totalL1MessagePopped
mstore(add(batchHeader1, add(0x20, 25)), 0x246394445f4fe64ed5598554d55d1682d6fb3fe04bf58eb54ef81d1189fafb51) // dataHash
mstore(add(batchHeader1, add(0x20, 57)), batchHash0) // parentBatchHash
}
rollup.importGenesisBatch(_batchHeader, bytes32(uint256(1)), messageHash);
rollup.finalizeBatchWithProof(
batchHeader1,
bytes32(uint256(1)),
bytes32(uint256(2)),
messageHash,
new bytes(0)
);
}
}

View File

@@ -12,81 +12,47 @@ import {Whitelist} from "../L2/predeploys/Whitelist.sol";
import {IL1ScrollMessenger, L1ScrollMessenger} from "../L1/L1ScrollMessenger.sol";
import {L2ScrollMessenger} from "../L2/L2ScrollMessenger.sol";
contract L1ScrollMessengerTest is DSTestPlus {
L2ScrollMessenger internal l2Messenger;
address internal feeVault;
L1ScrollMessenger internal l1Messenger;
ScrollChain internal scrollChain;
L1MessageQueue internal l1MessageQueue;
L2GasPriceOracle internal gasOracle;
EnforcedTxGateway internal enforcedTxGateway;
Whitelist internal whitelist;
import {L1GatewayTestBase} from "./L1GatewayTestBase.t.sol";
contract L1ScrollMessengerTest is L1GatewayTestBase {
function setUp() public {
// Deploy L2 contracts
l2Messenger = new L2ScrollMessenger(address(0), address(0), address(0));
// Deploy L1 contracts
scrollChain = new ScrollChain(0);
l1MessageQueue = new L1MessageQueue();
l1Messenger = new L1ScrollMessenger();
gasOracle = new L2GasPriceOracle();
enforcedTxGateway = new EnforcedTxGateway();
whitelist = new Whitelist(address(this));
// Initialize L1 contracts
l1Messenger.initialize(address(l2Messenger), feeVault, address(scrollChain), address(l1MessageQueue));
l1MessageQueue.initialize(
address(l1Messenger),
address(scrollChain),
address(enforcedTxGateway),
address(gasOracle),
10000000
);
gasOracle.initialize(0, 0, 0, 0);
scrollChain.initialize(address(l1MessageQueue), address(0), 44);
gasOracle.updateWhitelist(address(whitelist));
address[] memory _accounts = new address[](1);
_accounts[0] = address(this);
whitelist.updateWhitelistStatus(_accounts, true);
L1GatewayTestBase.setUpBase();
}
function testForbidCallMessageQueueFromL2() external {
// import genesis batch
bytes memory _batchHeader = new bytes(89);
assembly {
mstore(add(_batchHeader, add(0x20, 25)), 1)
}
scrollChain.importGenesisBatch(
_batchHeader,
bytes32(uint256(1)),
bytes32(0x3152134c22e545ab5d345248502b4f04ef5b45f735f939c7fe6ddc0ffefc9c52)
bytes32 _xDomainCalldataHash = keccak256(
abi.encodeWithSignature(
"relayMessage(address,address,uint256,uint256,bytes)",
address(this),
address(messageQueue),
0,
0,
new bytes(0)
)
);
prepareL2MessageRoot(_xDomainCalldataHash);
IL1ScrollMessenger.L2MessageProof memory proof;
proof.batchIndex = scrollChain.lastFinalizedBatchIndex();
proof.batchIndex = rollup.lastFinalizedBatchIndex();
hevm.expectRevert("Forbid to call message queue");
l1Messenger.relayMessageWithProof(address(this), address(l1MessageQueue), 0, 0, new bytes(0), proof);
l1Messenger.relayMessageWithProof(address(this), address(messageQueue), 0, 0, new bytes(0), proof);
}
function testForbidCallSelfFromL2() external {
// import genesis batch
bytes memory _batchHeader = new bytes(89);
assembly {
mstore(add(_batchHeader, 57), 1)
}
scrollChain.importGenesisBatch(
_batchHeader,
bytes32(uint256(1)),
bytes32(0xf7c03e2b13c88e3fca1410b228b001dd94e3f5ab4b4a4a6981d09a4eb3e5b631)
bytes32 _xDomainCalldataHash = keccak256(
abi.encodeWithSignature(
"relayMessage(address,address,uint256,uint256,bytes)",
address(this),
address(l1Messenger),
0,
0,
new bytes(0)
)
);
prepareL2MessageRoot(_xDomainCalldataHash);
IL1ScrollMessenger.L2MessageProof memory proof;
proof.batchIndex = scrollChain.lastFinalizedBatchIndex();
proof.batchIndex = rollup.lastFinalizedBatchIndex();
hevm.expectRevert("Forbid to call self");
l1Messenger.relayMessageWithProof(address(this), address(l1Messenger), 0, 0, new bytes(0), proof);
@@ -110,7 +76,9 @@ contract L1ScrollMessengerTest is DSTestPlus {
function testReplayMessage(uint256 exceedValue, address refundAddress) external {
hevm.assume(refundAddress.code.length == 0);
hevm.assume(uint256(uint160(refundAddress)) > 100); // ignore some precompile contracts
hevm.assume(uint256(uint160(refundAddress)) > uint256(100)); // ignore some precompile contracts
hevm.assume(refundAddress != feeVault);
hevm.assume(refundAddress != address(0x000000000000000000636F6e736F6c652e6c6f67)); // ignore console/console2
exceedValue = bound(exceedValue, 1, address(this).balance / 2);
@@ -170,7 +138,7 @@ contract L1ScrollMessengerTest is DSTestPlus {
l1Messenger.sendMessage{value: _fee + value}(address(0), value, hex"0011220033", gasLimit);
// update max gas limit
l1MessageQueue.updateMaxGasLimit(gasLimit);
messageQueue.updateMaxGasLimit(gasLimit);
l1Messenger.sendMessage{value: _fee + value}(address(0), value, hex"0011220033", gasLimit);
}
}

View File

@@ -0,0 +1,105 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import {DSTestPlus} from "solmate/test/utils/DSTestPlus.sol";
import {L1MessageQueue} from "../L1/rollup/L1MessageQueue.sol";
import {MultipleVersionRollupVerifier} from "../L1/rollup/MultipleVersionRollupVerifier.sol";
import {MockZkEvmVerifier} from "./mocks/MockZkEvmVerifier.sol";
contract MultipleVersionRollupVerifierTest is DSTestPlus {
// from MultipleVersionRollupVerifier
event UpdateVerifier(uint256 startBatchIndex, address verifier);
MultipleVersionRollupVerifier private verifier;
MockZkEvmVerifier private v0;
MockZkEvmVerifier private v1;
MockZkEvmVerifier private v2;
function setUp() external {
v0 = new MockZkEvmVerifier();
v1 = new MockZkEvmVerifier();
v2 = new MockZkEvmVerifier();
verifier = new MultipleVersionRollupVerifier(address(v0));
}
function testUpdateVerifier(address _newVerifier) external {
hevm.assume(_newVerifier != address(0));
// set by non-owner, should revert
hevm.startPrank(address(1));
hevm.expectRevert("Ownable: caller is not the owner");
verifier.updateVerifier(0, address(0));
hevm.stopPrank();
// zero verifier address, revert
hevm.expectRevert("zero verifier address");
verifier.updateVerifier(0, address(0));
// change to random operator
assertEq(verifier.legacyVerifiersLength(), 0);
verifier.updateVerifier(uint64(100), _newVerifier);
assertEq(verifier.legacyVerifiersLength(), 1);
(uint64 _startBatchIndex, address _verifier) = verifier.latestVerifier();
assertEq(_startBatchIndex, uint64(100));
assertEq(_verifier, _newVerifier);
(_startBatchIndex, _verifier) = verifier.legacyVerifiers(0);
assertEq(_startBatchIndex, uint64(0));
assertEq(_verifier, address(v0));
// change to same batch index
verifier.updateVerifier(uint64(100), address(v1));
(_startBatchIndex, _verifier) = verifier.latestVerifier();
assertEq(_startBatchIndex, uint64(100));
assertEq(_verifier, address(v1));
(_startBatchIndex, _verifier) = verifier.legacyVerifiers(0);
assertEq(_startBatchIndex, uint64(0));
assertEq(_verifier, address(v0));
// start batch index too small, revert
hevm.expectRevert("start batch index too small");
verifier.updateVerifier(99, _newVerifier);
}
function testGetVerifier() external {
verifier.updateVerifier(100, address(v1));
verifier.updateVerifier(300, address(v2));
assertEq(verifier.getVerifier(0), address(v0));
assertEq(verifier.getVerifier(1), address(v0));
assertEq(verifier.getVerifier(99), address(v0));
assertEq(verifier.getVerifier(100), address(v1));
assertEq(verifier.getVerifier(101), address(v1));
assertEq(verifier.getVerifier(299), address(v1));
assertEq(verifier.getVerifier(300), address(v2));
assertEq(verifier.getVerifier(301), address(v2));
assertEq(verifier.getVerifier(10000), address(v2));
}
function testVerifyAggregateProof() external {
verifier.updateVerifier(100, address(v1));
verifier.updateVerifier(300, address(v2));
hevm.expectRevert(abi.encode(address(v0)));
verifier.verifyAggregateProof(0, new bytes(0), bytes32(0));
hevm.expectRevert(abi.encode(address(v0)));
verifier.verifyAggregateProof(1, new bytes(0), bytes32(0));
hevm.expectRevert(abi.encode(address(v0)));
verifier.verifyAggregateProof(99, new bytes(0), bytes32(0));
hevm.expectRevert(abi.encode(address(v1)));
verifier.verifyAggregateProof(100, new bytes(0), bytes32(0));
hevm.expectRevert(abi.encode(address(v1)));
verifier.verifyAggregateProof(101, new bytes(0), bytes32(0));
hevm.expectRevert(abi.encode(address(v1)));
verifier.verifyAggregateProof(299, new bytes(0), bytes32(0));
hevm.expectRevert(abi.encode(address(v2)));
verifier.verifyAggregateProof(300, new bytes(0), bytes32(0));
hevm.expectRevert(abi.encode(address(v2)));
verifier.verifyAggregateProof(301, new bytes(0), bytes32(0));
hevm.expectRevert(abi.encode(address(v2)));
verifier.verifyAggregateProof(10000, new bytes(0), bytes32(0));
}
}
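The expectations above pin down the routing rule: each verifier covers batch indices from its startBatchIndex up to (but excluding) the next verifier's start, and the latest verifier covers everything from its start onward. A minimal sketch of such a lookup (hypothetical; the real MultipleVersionRollupVerifier.getVerifier may differ in detail):

struct VerifierEntry {
    uint64 startBatchIndex;
    address verifier;
}

VerifierEntry[] private legacyVerifiersSketch; // sorted ascending by startBatchIndex
VerifierEntry private latestVerifierSketch;

function getVerifierSketch(uint256 _batchIndex) internal view returns (address) {
    if (_batchIndex >= latestVerifierSketch.startBatchIndex) {
        return latestVerifierSketch.verifier;
    }
    // walk legacy verifiers from newest to oldest
    for (uint256 i = legacyVerifiersSketch.length; i > 0; i--) {
        if (_batchIndex >= legacyVerifiersSketch[i - 1].startBatchIndex) {
            return legacyVerifiersSketch[i - 1].verifier;
        }
    }
    revert("no verifier for batch");
}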

View File

@@ -10,10 +10,14 @@ import {ScrollChain, IScrollChain} from "../L1/rollup/ScrollChain.sol";
import {MockScrollChain} from "./mocks/MockScrollChain.sol";
import {MockRollupVerifier} from "./mocks/MockRollupVerifier.sol";
// solhint-disable no-inline-assembly
contract ScrollChainTest is DSTestPlus {
// from ScrollChain
event UpdateSequencer(address indexed account, bool status);
event UpdateProver(address indexed account, bool status);
event UpdateVerifier(address oldVerifier, address newVerifier);
event UpdateMaxNumL2TxInChunk(uint256 oldMaxNumL2TxInChunk, uint256 newMaxNumL2TxInChunk);
event CommitBatch(bytes32 indexed batchHash);
event FinalizeBatch(bytes32 indexed batchHash, bytes32 stateRoot, bytes32 withdrawRoot);
@@ -42,35 +46,6 @@ contract ScrollChainTest is DSTestPlus {
rollup.initialize(address(messageQueue), address(0), 100);
}
/*
function testPublicInputHash() public {
IScrollChain.Batch memory batch;
batch.prevStateRoot = bytes32(0x000000000000000000000000000000000000000000000000000000000000cafe);
batch.newStateRoot = bytes32(0);
batch.withdrawTrieRoot = bytes32(0);
batch
.l2Transactions = hex"0000007402f8710582fd14808506e38dccc9825208944d496ccc28058b1d74b7a19541663e21154f9c848801561db11e24a43380c080a0d890606d7a35b2ab0f9b866d62c092d5b163f3e6a55537ae1485aac08c3f8ff7a023997be2d32f53e146b160fff0ba81e81dbb4491c865ab174d15c5b3d28c41ae";
batch.blocks = new IScrollChain.BlockContext[](1);
batch.blocks[0].blockHash = bytes32(0);
batch.blocks[0].parentHash = bytes32(0);
batch.blocks[0].blockNumber = 51966;
batch.blocks[0].timestamp = 123456789;
batch.blocks[0].baseFee = 0;
batch.blocks[0].gasLimit = 10000000000000000;
batch.blocks[0].numTransactions = 1;
batch.blocks[0].numL1Messages = 0;
(bytes32 hash, , , ) = chain.computePublicInputHash(0, batch);
assertEq(hash, bytes32(0xa9f2ca3175794f91226a410ba1e60fff07a405c957562675c4149b77e659d805));
batch
.l2Transactions = hex"00000064f8628001830f424094000000000000000000000000000000000000bbbb8080820a97a064e07cd8f939e2117724bdcbadc80dda421381cbc2a1f4e0d093d9cc5c5cf68ea03e264227f80852d88743cd9e43998f2746b619180366a87e4531debf9c3fa5dc";
(hash, , , ) = chain.computePublicInputHash(0, batch);
assertEq(hash, bytes32(0x398cb22bbfa1665c1b342b813267538a4c933d7f92d8bd9184aba0dd1122987b));
}
*/
function testCommitBatch() public {
bytes memory batchHeader0 = new bytes(89);
@@ -78,7 +53,7 @@ contract ScrollChainTest is DSTestPlus {
assembly {
mstore(add(batchHeader0, add(0x20, 25)), 1)
}
rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1)), bytes32(uint256(0)));
rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1)));
// caller not sequencer, revert
hevm.expectRevert("caller not sequencer");
@@ -148,10 +123,11 @@ contract ScrollChainTest is DSTestPlus {
}
function testFinalizeBatchWithProof() public {
// caller not sequencer, revert
hevm.expectRevert("caller not sequencer");
// caller not prover, revert
hevm.expectRevert("caller not prover");
rollup.finalizeBatchWithProof(new bytes(0), bytes32(0), bytes32(0), bytes32(0), new bytes(0));
rollup.updateProver(address(this), true);
rollup.updateSequencer(address(this), true);
bytes memory batchHeader0 = new bytes(89);
@@ -160,7 +136,7 @@ contract ScrollChainTest is DSTestPlus {
assembly {
mstore(add(batchHeader0, add(0x20, 25)), 1)
}
rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1)), bytes32(uint256(0)));
rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1)));
bytes32 batchHash0 = rollup.committedBatches(0);
bytes[] memory chunks = new bytes[](1);
@@ -240,6 +216,7 @@ contract ScrollChainTest is DSTestPlus {
function testCommitAndFinalizeWithL1Messages() public {
rollup.updateSequencer(address(this), true);
rollup.updateProver(address(this), true);
// import 300 L1 messages
for (uint256 i = 0; i < 300; i++) {
@@ -251,7 +228,7 @@ contract ScrollChainTest is DSTestPlus {
assembly {
mstore(add(batchHeader0, add(0x20, 25)), 1)
}
rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1)), bytes32(uint256(0)));
rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1)));
bytes32 batchHash0 = rollup.committedBatches(0);
bytes memory bitmap;
@@ -260,13 +237,34 @@ contract ScrollChainTest is DSTestPlus {
bytes memory chunk1;
// commit batch1, one chunk with one block, 1 tx, 1 L1 message, no skip
// => payload for data hash of chunk0
// 0000000000000000
// 0000000000000000
// 0000000000000000000000000000000000000000000000000000000000000000
// 0000000000000000
// 0001
// a2277fd30bbbe74323309023b56035b376d7768ad237ae4fc46ead7dc9591ae1
// => data hash for chunk0
// 9ef1e5694bdb014a1eea42be756a8f63bfd8781d6332e9ef3b5126d90c62f110
// => data hash for all chunks
// d9cb6bf9264006fcea490d5c261f7453ab95b1b26033a3805996791b8e3a62f3
// => payload for batch header
// 00
// 0000000000000001
// 0000000000000001
// 0000000000000001
// d9cb6bf9264006fcea490d5c261f7453ab95b1b26033a3805996791b8e3a62f3
// 119b828c2a2798d2c957228ebeaff7e10bb099ae0d4e224f3eeb779ff61cba61
// 0000000000000000000000000000000000000000000000000000000000000000
// => hash for batch header
// 00847173b29b238cf319cde79512b7c213e5a8b4138daa7051914c4592b6dfc7
bytes memory batchHeader1 = new bytes(89 + 32);
assembly {
mstore(add(batchHeader1, 0x20), 0) // version
mstore(add(batchHeader1, add(0x20, 1)), shl(192, 1)) // batchIndex = 1
mstore(add(batchHeader1, add(0x20, 9)), shl(192, 1)) // l1MessagePopped = 1
mstore(add(batchHeader1, add(0x20, 17)), shl(192, 1)) // totalL1MessagePopped = 1
mstore(add(batchHeader1, add(0x20, 25)), 0xfe21c37fa013c76f86b8593ddf15d685a04b55061d06797597ee866f6ff2edf8) // dataHash
mstore(add(batchHeader1, add(0x20, 25)), 0xd9cb6bf9264006fcea490d5c261f7453ab95b1b26033a3805996791b8e3a62f3) // dataHash
mstore(add(batchHeader1, add(0x20, 57)), batchHash0) // parentBatchHash
mstore(add(batchHeader1, add(0x20, 89)), 0) // bitmap0
}
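The assembly above lays out the 89-byte header whose payload the comment block spells out field by field, and the batch hash is keccak256 over those bytes. A hedged high-level equivalent (helper name hypothetical; widths per the comments: 1-byte version, three 8-byte big-endian counters, two 32-byte hashes, then the skip bitmap):

function hashBatchHeader(
    uint8 version,
    uint64 batchIndex,
    uint64 l1MessagePopped,
    uint64 totalL1MessagePopped,
    bytes32 dataHash,
    bytes32 parentBatchHash,
    bytes memory skippedL1MessageBitmap
) internal pure returns (bytes32) {
    // abi.encodePacked emits the fields big-endian with no padding,
    // reproducing the 89-byte-plus-bitmap payload built in assembly above
    return keccak256(
        abi.encodePacked(
            version,
            batchIndex,
            l1MessagePopped,
            totalL1MessagePopped,
            dataHash,
            parentBatchHash,
            skippedL1MessageBitmap
        )
    );
}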
@@ -280,9 +278,9 @@ contract ScrollChainTest is DSTestPlus {
chunks[0] = chunk0;
bitmap = new bytes(32);
rollup.commitBatch(0, batchHeader0, chunks, bitmap);
assertGt(uint256(rollup.committedBatches(1)), 0);
assertBoolEq(rollup.isBatchFinalized(1), false);
bytes32 batchHash1 = rollup.committedBatches(1);
assertEq(batchHash1, bytes32(0x00847173b29b238cf319cde79512b7c213e5a8b4138daa7051914c4592b6dfc7));
// finalize batch1
rollup.finalizeBatchWithProof(
@@ -301,17 +299,57 @@ contract ScrollChainTest is DSTestPlus {
// commit batch2 with two chunks, correctly
// 1. chunk0 has one block, 3 tx, no L1 messages
// => payload for chunk0
// 0000000000000000
// 0000000000000000
// 0000000000000000000000000000000000000000000000000000000000000000
// 0000000000000000
// 0003
// ... (some tx hashes)
// => data hash for chunk0
// 2ac1dad3f3696e5581dfc10f2c7a7a8fc5b344285f7d332c7895a8825fca609a
// 2. chunk1 has three blocks
// 2.1 block0 has 5 tx, 3 L1 messages, no skips
// 2.2 block1 has 10 tx, 5 L1 messages, even is skipped.
// 2.3 block2 has 300 tx, 256 L1 messages, odd position is skipped.
// => payload for chunk1
// 0000000000000000
// 0000000000000000
// 0000000000000000000000000000000000000000000000000000000000000000
// 0000000000000000
// 0005
// 0000000000000000
// 0000000000000000
// 0000000000000000000000000000000000000000000000000000000000000000
// 0000000000000000
// 000a
// 0000000000000000
// 0000000000000000
// 0000000000000000000000000000000000000000000000000000000000000000
// 0000000000000000
// 012c
// ... (some tx hashes)
// => data hash for chunk2
// 0520f1fbe159af97fdf1d6cfcfe7605f99f7bfe3ed876e87b64250b1810df00b
// => data hash for all chunks
// f52343299f6379fd15b20b23d51fc61b9b357b124be112686626b6278bcffa83
// => payload for batch header
// 00
// 0000000000000002
// 0000000000000108
// 0000000000000109
// f52343299f6379fd15b20b23d51fc61b9b357b124be112686626b6278bcffa83
// cef70bf80683c4d9b8b2813e90c314e8c56648e231300b8cfed9d666b0caf14e
// aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa800000000000000000000000000000000000000000000000000000000000000aa
// => hash for batch header
// 2231cf185a5c07931584f970738b4cd2ae4fb39e2d90853b26746c7616ea71b9
bytes memory batchHeader2 = new bytes(89 + 32 + 32);
assembly {
mstore(add(batchHeader2, 0x20), 0) // version
mstore(add(batchHeader2, add(0x20, 1)), shl(192, 2)) // batchIndex = 2
mstore(add(batchHeader2, add(0x20, 9)), shl(192, 264)) // l1MessagePopped = 264
mstore(add(batchHeader2, add(0x20, 17)), shl(192, 265)) // totalL1MessagePopped = 265
mstore(add(batchHeader2, add(0x20, 25)), 0xa447b3c80bc6c3ee1aebc1af746c7c6b35a05c4a7af89d2103e9e0788b54375c) // dataHash
mstore(add(batchHeader2, add(0x20, 25)), 0xf52343299f6379fd15b20b23d51fc61b9b357b124be112686626b6278bcffa83) // dataHash
mstore(add(batchHeader2, add(0x20, 57)), batchHash1) // parentBatchHash
mstore(
add(batchHeader2, add(0x20, 89)),
@@ -358,9 +396,9 @@ contract ScrollChainTest is DSTestPlus {
}
rollup.commitBatch(0, batchHeader1, chunks, bitmap);
assertGt(uint256(rollup.committedBatches(2)), 0);
assertBoolEq(rollup.isBatchFinalized(2), false);
bytes32 batchHash2 = rollup.committedBatches(2);
assertEq(batchHash2, bytes32(0x2231cf185a5c07931584f970738b4cd2ae4fb39e2d90853b26746c7616ea71b9));
// verify committed batch correctly
rollup.finalizeBatchWithProof(
@@ -412,7 +450,7 @@ contract ScrollChainTest is DSTestPlus {
assembly {
mstore(add(batchHeader0, add(0x20, 25)), 1)
}
rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1)), bytes32(uint256(0)));
rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1)));
bytes32 batchHash0 = rollup.committedBatches(0);
bytes[] memory chunks = new bytes[](1);
@@ -480,6 +518,27 @@ contract ScrollChainTest is DSTestPlus {
assertBoolEq(rollup.isSequencer(_sequencer), false);
}
function testUpdateProver(address _prover) public {
// set by non-owner, should revert
hevm.startPrank(address(1));
hevm.expectRevert("Ownable: caller is not the owner");
rollup.updateProver(_prover, true);
hevm.stopPrank();
// change to random operator
hevm.expectEmit(true, false, false, true);
emit UpdateProver(_prover, true);
assertBoolEq(rollup.isProver(_prover), false);
rollup.updateProver(_prover, true);
assertBoolEq(rollup.isProver(_prover), true);
hevm.expectEmit(true, false, false, true);
emit UpdateProver(_prover, false);
rollup.updateProver(_prover, false);
assertBoolEq(rollup.isProver(_prover), false);
}
function testUpdateVerifier(address _newVerifier) public {
// set by non-owner, should revert
hevm.startPrank(address(1));
@@ -496,58 +555,74 @@ contract ScrollChainTest is DSTestPlus {
assertEq(rollup.verifier(), _newVerifier);
}
function testUpdateMaxNumL2TxInChunk(uint256 _maxNumL2TxInChunk) public {
// set by non-owner, should revert
hevm.startPrank(address(1));
hevm.expectRevert("Ownable: caller is not the owner");
rollup.updateMaxNumL2TxInChunk(_maxNumL2TxInChunk);
hevm.stopPrank();
// change to random operator
hevm.expectEmit(false, false, false, true);
emit UpdateMaxNumL2TxInChunk(100, _maxNumL2TxInChunk);
assertEq(rollup.maxNumL2TxInChunk(), 100);
rollup.updateMaxNumL2TxInChunk(_maxNumL2TxInChunk);
assertEq(rollup.maxNumL2TxInChunk(), _maxNumL2TxInChunk);
}
function testImportGenesisBlock() public {
bytes memory batchHeader;
// zero state root, revert
batchHeader = new bytes(89);
hevm.expectRevert("zero state root");
rollup.importGenesisBatch(batchHeader, bytes32(0), bytes32(0));
rollup.importGenesisBatch(batchHeader, bytes32(0));
// batch header length too small, revert
batchHeader = new bytes(88);
hevm.expectRevert("batch header length too small");
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)), bytes32(0));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)));
// wrong bitmap length, revert
batchHeader = new bytes(90);
hevm.expectRevert("wrong bitmap length");
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)), bytes32(0));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)));
// not all fields are zero, revert
batchHeader = new bytes(89);
batchHeader[0] = bytes1(uint8(1)); // version not zero
hevm.expectRevert("not all fields are zero");
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)), bytes32(0));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)));
batchHeader = new bytes(89);
batchHeader[1] = bytes1(uint8(1)); // batchIndex not zero
hevm.expectRevert("not all fields are zero");
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)), bytes32(0));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)));
batchHeader = new bytes(89 + 32);
assembly {
mstore(add(batchHeader, add(0x20, 9)), shl(192, 1)) // l1MessagePopped not zero
}
hevm.expectRevert("not all fields are zero");
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)), bytes32(0));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)));
batchHeader = new bytes(89);
batchHeader[17] = bytes1(uint8(1)); // totalL1MessagePopped not zero
hevm.expectRevert("not all fields are zero");
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)), bytes32(0));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)));
// zero data hash, revert
batchHeader = new bytes(89);
hevm.expectRevert("zero data hash");
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)), bytes32(0));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)));
// nonzero parent batch hash, revert
batchHeader = new bytes(89);
batchHeader[25] = bytes1(uint8(1)); // dataHash not zero
batchHeader[57] = bytes1(uint8(1)); // parentBatchHash not zero
hevm.expectRevert("nonzero parent batch hash");
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)), bytes32(0));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)));
// import correctly
batchHeader = new bytes(89);
@@ -555,13 +630,13 @@ contract ScrollChainTest is DSTestPlus {
assertEq(rollup.finalizedStateRoots(0), bytes32(0));
assertEq(rollup.withdrawRoots(0), bytes32(0));
assertEq(rollup.committedBatches(0), bytes32(0));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)), bytes32(uint256(2)));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)));
assertEq(rollup.finalizedStateRoots(0), bytes32(uint256(1)));
assertEq(rollup.withdrawRoots(0), bytes32(uint256(2)));
assertEq(rollup.withdrawRoots(0), bytes32(0));
assertGt(uint256(rollup.committedBatches(0)), 0);
// Genesis batch imported, revert
hevm.expectRevert("Genesis batch imported");
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)), bytes32(uint256(2)));
rollup.importGenesisBatch(batchHeader, bytes32(uint256(1)));
}
}
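One detail from the batch-2 payload comments above is worth unpacking: the skippedL1MessageBitmap (the 0xaaa... words) carries one bit per popped L1 message, and 0xaa is 0b10101010, i.e. odd positions skipped. A minimal sketch of the bit test, under the assumption that bit (index % 256) of word (index / 256) flags the message at that index:

function isL1MessageSkipped(bytes memory bitmap, uint256 index) internal pure returns (bool) {
    uint256 word;
    assembly {
        // 32-byte words are stored consecutively after the bytes length slot
        word := mload(add(add(bitmap, 0x20), mul(div(index, 256), 0x20)))
    }
    return ((word >> (index % 256)) & 1) == 1;
}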

View File

@@ -6,5 +6,9 @@ import {IRollupVerifier} from "../../libraries/verifier/IRollupVerifier.sol";
contract MockRollupVerifier is IRollupVerifier {
/// @inheritdoc IRollupVerifier
function verifyAggregateProof(bytes calldata, bytes32) external view {}
function verifyAggregateProof(
uint256,
bytes calldata,
bytes32
) external view {}
}

View File

@@ -0,0 +1,14 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import {IZkEvmVerifier} from "../../libraries/verifier/IZkEvmVerifier.sol";
contract MockZkEvmVerifier is IZkEvmVerifier {
event Called(address);
/// @inheritdoc IZkEvmVerifier
function verify(bytes calldata, bytes32) external view {
revert(string(abi.encode(address(this))));
}
}

View File

@@ -35,6 +35,7 @@ test-gpu-verifier: libzkp
go test -tags="gpu ffi" -timeout 0 -v ./verifier
lint: ## Lint the files - used for CI
cp -r ../common/libzkp/interface ./verifier/lib
GOBIN=$(PWD)/build/bin go run ../build/lint.go
clean: ## Empty out the bin folder

View File

@@ -99,5 +99,5 @@ func (c *CoordinatorApp) MockConfig(store bool) error {
return err
}
return os.WriteFile(c.coordinatorFile, data, 0644)
return os.WriteFile(c.coordinatorFile, data, 0600)
}

View File

@@ -1,6 +1,6 @@
module scroll-tech/coordinator
go 1.18
go 1.19
require (
github.com/agiledragon/gomonkey/v2 v2.9.0
@@ -9,6 +9,7 @@ require (
github.com/scroll-tech/go-ethereum v1.10.14-0.20230508165858-27a3830afa61
github.com/stretchr/testify v1.8.2
github.com/urfave/cli/v2 v2.17.2-0.20221006022127-8f469abc00aa
golang.org/x/exp v0.0.0-20230206171751-46f607a40771
golang.org/x/sync v0.1.0
)

View File

@@ -138,6 +138,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g=
golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
golang.org/x/exp v0.0.0-20230206171751-46f607a40771 h1:xP7rWLUr1e1n2xkK5YB4LI0hPEy3LJC6Wk+D4pGlOJg=
golang.org/x/exp v0.0.0-20230206171751-46f607a40771/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=

View File

@@ -4,7 +4,6 @@ import (
"context"
"errors"
"fmt"
mathrand "math/rand"
"sync"
"sync/atomic"
"time"
@@ -16,6 +15,7 @@ import (
"github.com/scroll-tech/go-ethereum/log"
geth_metrics "github.com/scroll-tech/go-ethereum/metrics"
"github.com/scroll-tech/go-ethereum/rpc"
"golang.org/x/exp/rand"
"scroll-tech/common/metrics"
"scroll-tech/common/types"
@@ -505,7 +505,7 @@ func (m *Manager) CollectProofs(sess *session) {
// When all rollers have finished submitting their tasks, select a winner among the rollers with valid proofs, then return and terminate the for loop.
if finished && len(validRollers) > 0 {
// Select a random index for this slice.
randIndex := mathrand.Intn(len(validRollers))
randIndex := rand.Int63n(int64(len(validRollers)))
_ = validRollers[randIndex]
// TODO: reward winner
@@ -569,39 +569,39 @@ func (m *Manager) APIs() []rpc.API {
// StartBasicProofGenerationSession starts a basic proof generation session
func (m *Manager) StartBasicProofGenerationSession(task *types.BlockBatch, prevSession *session) (success bool) {
var taskId string
var taskID string
if task != nil {
taskId = task.Hash
taskID = task.Hash
} else {
taskId = prevSession.info.ID
taskID = prevSession.info.ID
}
if m.GetNumberOfIdleRollers(message.BasicProve) == 0 {
log.Warn("no idle basic roller when starting proof generation session", "id", taskId)
log.Warn("no idle basic roller when starting proof generation session", "id", taskID)
return false
}
log.Info("start basic proof generation session", "id", taskId)
log.Info("start basic proof generation session", "id", taskID)
defer func() {
if !success {
if task != nil {
if err := m.orm.UpdateProvingStatus(taskId, types.ProvingTaskUnassigned); err != nil {
log.Error("fail to reset task_status as Unassigned", "id", taskId, "err", err)
if err := m.orm.UpdateProvingStatus(taskID, types.ProvingTaskUnassigned); err != nil {
log.Error("fail to reset task_status as Unassigned", "id", taskID, "err", err)
}
} else {
if err := m.orm.UpdateProvingStatus(taskId, types.ProvingTaskFailed); err != nil {
log.Error("fail to reset task_status as Failed", "id", taskId, "err", err)
if err := m.orm.UpdateProvingStatus(taskID, types.ProvingTaskFailed); err != nil {
log.Error("fail to reset task_status as Failed", "id", taskID, "err", err)
}
}
}
}()
// Get block traces.
blockInfos, err := m.orm.GetL2BlockInfos(map[string]interface{}{"batch_hash": taskId})
blockInfos, err := m.orm.GetL2BlockInfos(map[string]interface{}{"batch_hash": taskID})
if err != nil {
log.Error(
"could not GetBlockInfos",
"batch_hash", taskId,
"batch_hash", taskID,
"error", err,
)
return false
@@ -619,10 +619,10 @@ func (m *Manager) StartBasicProofGenerationSession(task *types.BlockBatch, prevS
log.Info("selectRoller returns nil")
break
}
log.Info("roller is picked", "session id", taskId, "name", roller.Name, "public key", roller.PublicKey)
log.Info("roller is picked", "session id", taskID, "name", roller.Name, "public key", roller.PublicKey)
// send trace to roller
if !roller.sendTask(&message.TaskMsg{ID: taskId, Type: message.BasicProve, BlockHashes: blockHashes}) {
log.Error("send task failed", "roller name", roller.Name, "public key", roller.PublicKey, "id", taskId)
if !roller.sendTask(&message.TaskMsg{ID: taskID, Type: message.BasicProve, BlockHashes: blockHashes}) {
log.Error("send task failed", "roller name", roller.Name, "public key", roller.PublicKey, "id", taskID)
continue
}
m.updateMetricRollerProofsLastAssignedTimestampGauge(roller.PublicKey)
@@ -630,20 +630,20 @@ func (m *Manager) StartBasicProofGenerationSession(task *types.BlockBatch, prevS
}
// No roller assigned.
if len(rollers) == 0 {
log.Error("no roller assigned", "id", taskId, "number of idle basic rollers", m.GetNumberOfIdleRollers(message.BasicProve))
log.Error("no roller assigned", "id", taskID, "number of idle basic rollers", m.GetNumberOfIdleRollers(message.BasicProve))
return false
}
// Update session proving status as assigned.
if err = m.orm.UpdateProvingStatus(taskId, types.ProvingTaskAssigned); err != nil {
log.Error("failed to update task status", "id", taskId, "err", err)
if err = m.orm.UpdateProvingStatus(taskID, types.ProvingTaskAssigned); err != nil {
log.Error("failed to update task status", "id", taskID, "err", err)
return false
}
// Create a proof generation session.
sess := &session{
info: &types.SessionInfo{
ID: taskId,
ID: taskID,
Rollers: rollers,
ProveType: message.BasicProve,
StartTimestamp: time.Now().Unix(),
@@ -672,7 +672,7 @@ func (m *Manager) StartBasicProofGenerationSession(task *types.BlockBatch, prevS
}
m.mu.Lock()
m.sessions[taskId] = sess
m.sessions[taskID] = sess
m.mu.Unlock()
go m.CollectProofs(sess)
@@ -681,26 +681,26 @@ func (m *Manager) StartBasicProofGenerationSession(task *types.BlockBatch, prevS
// StartAggProofGenerationSession starts an aggregator proof generation.
func (m *Manager) StartAggProofGenerationSession(task *types.AggTask, prevSession *session) (success bool) {
var taskId string
var taskID string
if task != nil {
taskId = task.ID
taskID = task.ID
} else {
taskId = prevSession.info.ID
taskID = prevSession.info.ID
}
if m.GetNumberOfIdleRollers(message.AggregatorProve) == 0 {
log.Warn("no idle common roller when starting proof generation session", "id", taskId)
log.Warn("no idle common roller when starting proof generation session", "id", taskID)
return false
}
log.Info("start aggregator proof generation session", "id", taskId)
log.Info("start aggregator proof generation session", "id", taskID)
defer func() {
if !success {
if task != nil {
if err := m.orm.UpdateAggTaskStatus(taskId, types.ProvingTaskUnassigned); err != nil {
log.Error("fail to reset task_status as Unassigned", "id", taskId, "err", err)
} else if err := m.orm.UpdateAggTaskStatus(taskId, types.ProvingTaskFailed); err != nil {
log.Error("fail to reset task_status as Failed", "id", taskId, "err", err)
if err := m.orm.UpdateAggTaskStatus(taskID, types.ProvingTaskUnassigned); err != nil {
log.Error("fail to reset task_status as Unassigned", "id", taskID, "err", err)
} else if err := m.orm.UpdateAggTaskStatus(taskID, types.ProvingTaskFailed); err != nil {
log.Error("fail to reset task_status as Failed", "id", taskID, "err", err)
}
}
}
@@ -708,9 +708,9 @@ func (m *Manager) StartAggProofGenerationSession(task *types.AggTask, prevSessio
}()
// get agg task from db
subProofs, err := m.orm.GetSubProofsByAggTaskID(taskId)
subProofs, err := m.orm.GetSubProofsByAggTaskID(taskID)
if err != nil {
log.Error("failed to get sub proofs for aggregator task", "id", taskId, "error", err)
log.Error("failed to get sub proofs for aggregator task", "id", taskID, "error", err)
return false
}
@@ -722,14 +722,14 @@ func (m *Manager) StartAggProofGenerationSession(task *types.AggTask, prevSessio
log.Info("selectRoller returns nil")
break
}
log.Info("roller is picked", "session id", taskId, "name", roller.Name, "type", roller.Type, "public key", roller.PublicKey)
log.Info("roller is picked", "session id", taskID, "name", roller.Name, "type", roller.Type, "public key", roller.PublicKey)
// send trace to roller
if !roller.sendTask(&message.TaskMsg{
ID: taskId,
ID: taskID,
Type: message.AggregatorProve,
SubProofs: subProofs,
}) {
log.Error("send task failed", "roller name", roller.Name, "public key", roller.PublicKey, "id", taskId)
log.Error("send task failed", "roller name", roller.Name, "public key", roller.PublicKey, "id", taskID)
continue
}
m.updateMetricRollerProofsLastAssignedTimestampGauge(roller.PublicKey)
@@ -737,20 +737,20 @@ func (m *Manager) StartAggProofGenerationSession(task *types.AggTask, prevSessio
}
// No roller assigned.
if len(rollers) == 0 {
log.Error("no roller assigned", "id", taskId, "number of idle aggregator rollers", m.GetNumberOfIdleRollers(message.AggregatorProve))
log.Error("no roller assigned", "id", taskID, "number of idle aggregator rollers", m.GetNumberOfIdleRollers(message.AggregatorProve))
return false
}
// Update session proving status as assigned.
if err = m.orm.UpdateAggTaskStatus(taskId, types.ProvingTaskAssigned); err != nil {
log.Error("failed to update task status", "id", taskId, "err", err)
if err = m.orm.UpdateAggTaskStatus(taskID, types.ProvingTaskAssigned); err != nil {
log.Error("failed to update task status", "id", taskID, "err", err)
return false
}
// Create a proof generation session.
sess := &session{
info: &types.SessionInfo{
ID: taskId,
ID: taskID,
Rollers: rollers,
ProveType: message.AggregatorProve,
StartTimestamp: time.Now().Unix(),
@@ -779,7 +779,7 @@ func (m *Manager) StartAggProofGenerationSession(task *types.AggTask, prevSessio
}
m.mu.Lock()
m.sessions[taskId] = sess
m.sessions[taskID] = sess
m.mu.Unlock()
go m.CollectProofs(sess)
@@ -797,7 +797,7 @@ func (m *Manager) VerifyToken(authMsg *message.AuthMsg) (bool, error) {
pubkey, _ := authMsg.PublicKey()
// GetValue returns nil if value is expired
if token, ok := m.tokenCache.Get(pubkey); !ok || token != authMsg.Identity.Token {
return false, fmt.Errorf("failed to find corresponding token. roller name: %s. roller pk: %s.", authMsg.Identity.Name, pubkey)
return false, fmt.Errorf("failed to find corresponding token. roller name: %s. roller pk: %s", authMsg.Identity.Name, pubkey)
}
return true, nil
}
@@ -816,8 +816,8 @@ func (m *Manager) verifyProof(proof *message.AggProof) (bool, error) {
return false, errors.New("coordinator has stopped before verification")
}
verifyResultChan := m.addVerifyTask(proof)
verifyResult := <-verifyResultChan
return verifyResult.result, verifyResult.err
result := <-verifyResultChan
return result.result, result.err
}
type verifyResult struct {

Some files were not shown because too many files have changed in this diff Show More