Compare commits

..

7 Commits

Author SHA1 Message Date
Max Wolff
487f4f2af4 wip cli 2023-06-27 21:42:24 -07:00
Max Wolff
0a57747085 add wip cli 2023-06-27 21:36:44 -07:00
Max Wolff
e59a1d4fba demo 2023-06-19 00:40:26 -07:00
Max Wolff
17bbb929b7 debug v param. sigs now work 2023-06-16 02:04:58 -07:00
Max Wolff
c95e0c1782 wip 2023-06-13 10:13:55 -07:00
Max Wolff
091da32936 add deployment script 2023-06-02 16:11:27 -07:00
Max Wolff
6155612eec wip 2023-05-31 00:33:49 -07:00
139 changed files with 1851 additions and 14724 deletions

View File

@@ -1,9 +1,9 @@
### Purpose or design rationale of this PR
## 1. Purpose or design rationale of this PR
*Describe your change. Make sure to answer these three questions: What does this PR do? Why does it do it? How does it do it?*
...
### PR title
## 2. PR title
Your PR title must follow [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/#summary) (as we are doing squash merge for each PR), so it must start with one of the following [types](https://github.com/angular/angular/blob/22b96b9/CONTRIBUTING.md#type):
@@ -18,17 +18,17 @@ Your PR title must follow [conventional commits](https://www.conventionalcommits
- [ ] test: Adding missing tests or correcting existing tests
### Deployment tag versioning
## 3. Deployment tag versioning
Has `tag` in `common/version.go` been updated?
- [ ] No, this PR doesn't involve a new deployment, git tag, docker image tag
- [ ] This PR doesn't involve a new deployment, git tag, docker image tag
- [ ] Yes
### Breaking change label
## 4. Breaking change label
Does this PR have the `breaking-change` label?
- [ ] No, this PR is not a breaking change
- [ ] This PR is not a breaking change
- [ ] Yes

View File

@@ -9,8 +9,6 @@ on:
- alpha
paths:
- 'bridge/**'
- 'common/**'
- 'database/**'
- '.github/workflows/bridge.yml'
pull_request:
types:
@@ -20,61 +18,14 @@ on:
- ready_for_review
paths:
- 'bridge/**'
- 'common/**'
- 'database/**'
- '.github/workflows/bridge.yml'
defaults:
run:
working-directory: 'bridge'
jobs:
check:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
name: 'solc'
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Lint
working-directory: 'bridge'
run: |
rm -rf $HOME/.cache/golangci-lint
make mock_abi
make lint
goimports-lint:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- name: Run goimports lint
run: goimports -local scroll-tech/bridge/ -w .
working-directory: 'bridge'
- name: Run go mod tidy
run: go mod tidy
working-directory: 'bridge'
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
working-directory: 'bridge'
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
fi
tests:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
@@ -92,24 +43,31 @@ jobs:
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites
- name: Lint
run: |
make dev_docker
make -C bridge mock_abi
- name: Build bridge binaries
working-directory: 'bridge'
run: |
make bridge_bins
- name: Test bridge packages
working-directory: 'bridge'
run: |
go test -v -race -gcflags="-l" -ldflags="-s=false" -coverprofile=coverage.txt -covermode=atomic ./...
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
rm -rf $HOME/.cache/golangci-lint
make mock_abi
make lint
goimports-lint:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
flags: bridge
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- run: goimports -local scroll-tech/bridge/ -w .
- run: go mod tidy
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
fi
# docker-build:
# if: github.event.pull_request.draft == false
# runs-on: ubuntu-latest

View File

@@ -1,4 +1,4 @@
name: BridgeHistoryAPI
name: BridgeHistoryApi
on:
push:
@@ -32,7 +32,7 @@ jobs:
# - name: Install Go
# uses: actions/setup-go@v2
# with:
# go-version: 1.19.x
# go-version: 1.20.x
# - name: Checkout code
# uses: actions/checkout@v2
# - name: Lint
@@ -46,19 +46,13 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.20.x
- name: Checkout code
uses: actions/checkout@v2
- name: Test
run: |
go get ./...
make test
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: bridge-history-api
goimports-lint:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
@@ -66,7 +60,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.20.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
@@ -79,3 +73,4 @@ jobs:
if [ -n "$(git status --porcelain)" ]; then
exit 1
fi

View File

@@ -20,6 +20,10 @@ on:
- 'common/**'
- '.github/workflows/common.yml'
defaults:
run:
working-directory: 'common'
jobs:
check:
if: github.event.pull_request.draft == false
@@ -33,7 +37,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Cache cargo
@@ -41,7 +45,6 @@ jobs:
with:
workspaces: "common/libzkp/impl -> target"
- name: Lint
working-directory: 'common'
run: |
rm -rf $HOME/.cache/golangci-lint
make lint
@@ -52,52 +55,16 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- name: Run goimports lint
working-directory: 'common'
run: goimports -local scroll-tech/common/ -w .
- name: Run go mod tidy
working-directory: 'common'
run: go mod tidy
- run: goimports -local scroll-tech/common/ -w .
- run: go mod tidy
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
working-directory: 'common'
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
fi
tests:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
name: 'solc'
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites
run: |
make dev_docker
- name: Test common packages
working-directory: 'common'
run: |
go test -v -race -gcflags="-l" -ldflags="-s=false" -coverprofile=coverage.txt -covermode=atomic ./...
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: common

View File

@@ -9,8 +9,6 @@ on:
- alpha
paths:
- 'coordinator/**'
- 'common/**'
- 'database/**'
- '.github/workflows/coordinator.yml'
pull_request:
types:
@@ -20,10 +18,12 @@ on:
- ready_for_review
paths:
- 'coordinator/**'
- 'common/**'
- 'database/**'
- '.github/workflows/coordinator.yml'
defaults:
run:
working-directory: 'coordinator'
jobs:
check:
if: github.event.pull_request.draft == false
@@ -37,11 +37,10 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Lint
working-directory: 'coordinator'
run: |
rm -rf $HOME/.cache/golangci-lint
make lint
@@ -52,20 +51,15 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- name: Run goimports lint
working-directory: 'coordinator'
run: goimports -local scroll-tech/coordinator/ -w .
- name: Run go mod tidy
working-directory: 'coordinator'
run: go mod tidy
- run: goimports -local scroll-tech/coordinator/ -w .
- run: go mod tidy
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
working-directory: 'coordinator'
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
@@ -86,35 +80,3 @@ jobs:
# push: false
# # cache-from: type=gha,scope=${{ github.workflow }}
# # cache-to: type=gha,scope=${{ github.workflow }}
tests:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
name: 'solc'
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites
run: |
make dev_docker
- name: Test coordinator packages
working-directory: 'coordinator'
run: |
# go test -exec "env LD_LIBRARY_PATH=${PWD}/verifier/lib" -v -race -gcflags="-l" -ldflags="-s=false" -coverpkg="scroll-tech/coordinator" -coverprofile=coverage.txt -covermode=atomic ./...
go test -v -race -gcflags="-l" -ldflags="-s=false" -coverprofile=coverage.txt -covermode=atomic -tags mock_verifier ./...
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: coordinator

View File

@@ -9,7 +9,6 @@ on:
- alpha
paths:
- 'database/**'
- 'common/**'
- '.github/workflows/database.yml'
pull_request:
types:
@@ -19,9 +18,12 @@ on:
- ready_for_review
paths:
- 'database/**'
- 'common/**'
- '.github/workflows/database.yml'
defaults:
run:
working-directory: 'database'
jobs:
check:
if: github.event.pull_request.draft == false
@@ -30,11 +32,10 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Lint
working-directory: 'database'
run: |
rm -rf $HOME/.cache/golangci-lint
make lint
@@ -45,52 +46,16 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- name: Run goimports lint
working-directory: 'database'
run: goimports -local scroll-tech/database/ -w .
- name: Run go mod tidy
working-directory: 'database'
run: go mod tidy
- run: goimports -local scroll-tech/database/ -w .
- run: go mod tidy
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
working-directory: 'database'
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
fi
tests:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
name: 'solc'
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites
run: |
make dev_docker
- name: Test database packages
working-directory: 'database'
run: |
go test -v -race -gcflags="-l" -ldflags="-s=false" -coverprofile=coverage.txt -covermode=atomic ./...
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: database

View File

@@ -1,43 +0,0 @@
name: Integration
on:
push:
branches:
- main
- staging
- develop
- alpha
pull_request:
types:
- opened
- reopened
- synchronize
- ready_for_review
jobs:
tests:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install Solc
uses: supplypike/setup-bin@v3
with:
uri: 'https://github.com/ethereum/solidity/releases/download/v0.8.16/solc-static-linux'
name: 'solc'
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Build prerequisites
run: |
make dev_docker
make -C bridge mock_abi
make -C common/bytecode all
- name: Run integration tests
run: |
go test -v -tags="mock_prover mock_verifier" -p 1 -coverprofile=coverage.txt scroll-tech/integration-test/...

View File

@@ -1,59 +0,0 @@
name: Intermediate Docker
on:
workflow_dispatch:
inputs:
GO_VERSION:
description: 'Go version'
required: true
type: string
default: '1.19'
RUST_VERSION:
description: 'Rust toolchain version'
required: true
type: string
default: 'nightly-2022-12-10'
PYTHON_VERSION:
description: 'Python version'
required: false
type: string
default: '3.10'
CUDA_VERSION:
description: 'Cuda version'
required: false
type: string
default: '11.7.1'
defaults:
run:
working-directory: 'build/dockerfiles/intermediate'
jobs:
build-and-push:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build
run: |
make all
env:
GO_VERSION: ${{ inputs.GO_VERSION }}
RUST_VERSION: ${{ inputs.RUST_VERSION }}
PYTHON_VERSION: ${{ inputs.PYTHON_VERSION }}
CUDA_VERSION: ${{ inputs.CUDA_VERSION }}
- name: Publish
run: |
make publish
env:
GO_VERSION: ${{ inputs.GO_VERSION }}
RUST_VERSION: ${{ inputs.RUST_VERSION }}
PYTHON_VERSION: ${{ inputs.PYTHON_VERSION }}
CUDA_VERSION: ${{ inputs.CUDA_VERSION }}

View File

@@ -37,7 +37,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Cache cargo
@@ -47,13 +47,7 @@ jobs:
- name: Test
run: |
make roller
go test -tags="mock_prover" -v -coverprofile=coverage.txt ./...
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: roller
go test -tags="mock_prover" -v ./...
check:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
@@ -61,7 +55,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Lint
@@ -75,7 +69,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.19.x
go-version: 1.18.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install goimports

3
.gitmodules vendored
View File

@@ -13,3 +13,6 @@
[submodule "contracts/lib/solmate"]
path = contracts/lib/solmate
url = https://github.com/rari-capital/solmate
[submodule "contracts/lib/safe-contracts"]
path = contracts/lib/safe-contracts
url = https://github.com/safe-global/safe-contracts

99
Jenkinsfile vendored Normal file
View File

@@ -0,0 +1,99 @@
imagePrefix = 'scrolltech'
credentialDocker = 'dockerhub'
pipeline {
agent any
options {
timeout (20)
}
tools {
go 'go-1.18'
nodejs "nodejs"
}
environment {
GO111MODULE = 'on'
PATH="/home/ubuntu/.cargo/bin:$PATH"
LD_LIBRARY_PATH="$LD_LIBRARY_PATH:./coordinator/verifier/lib"
CHAIN_ID='534353'
// LOG_DOCKER = 'true'
}
stages {
stage('Build') {
parallel {
stage('Build Prerequisite') {
steps {
sh 'make dev_docker'
sh 'make -C bridge mock_abi'
sh 'make -C common/bytecode all'
}
}
stage('Check Bridge Compilation') {
steps {
sh 'make -C bridge bridge_bins'
}
}
stage('Check Coordinator Compilation') {
steps {
sh 'export PATH=/home/ubuntu/go/bin:$PATH'
sh 'make -C coordinator coordinator'
}
}
stage('Check Database Compilation') {
steps {
sh 'make -C database db_cli'
}
}
stage('Check Database Docker Build') {
steps {
sh 'make -C database docker'
}
}
}
}
stage('Parallel Test') {
parallel{
stage('Race test common package') {
steps {
sh 'go test -v -race -coverprofile=coverage.common.txt -covermode=atomic scroll-tech/common/...'
}
}
stage('Race test bridge package') {
steps {
sh "cd ./bridge && ../build/run_tests.sh bridge"
}
}
stage('Race test coordinator package') {
steps {
sh 'cd ./coordinator && go test -exec "env LD_LIBRARY_PATH=${PWD}/verifier/lib" -v -race -gcflags="-l" -ldflags="-s=false" -coverpkg="scroll-tech/coordinator" -coverprofile=../coverage.coordinator.txt -covermode=atomic ./...'
}
}
stage('Race test database package') {
steps {
sh 'go test -v -race -coverprofile=coverage.db.txt -covermode=atomic scroll-tech/database/...'
}
}
stage('Integration test') {
steps {
sh 'go test -v -tags="mock_prover mock_verifier" -p 1 scroll-tech/integration-test/...'
}
}
}
}
stage('Compare Coverage') {
steps {
sh "./build/post-test-report-coverage.sh"
script {
currentBuild.result = 'SUCCESS'
}
step([$class: 'CompareCoverageAction', publishResultAs: 'Comment', scmVars: [GIT_URL: env.GIT_URL]])
}
}
}
post {
always {
publishCoverage adapters: [coberturaReportAdapter(path: 'cobertura.xml', thresholds: [[thresholdTarget: 'Aggregated Report', unhealthyThreshold: 40.0]])], checksName: '', sourceFileResolver: sourceFiles('NEVER_STORE')
cleanWs()
slackSend(message: "${JOB_BASE_NAME} ${GIT_COMMIT} #${BUILD_NUMBER} deploy ${currentBuild.result}")
}
}
}

View File

@@ -1,9 +1,9 @@
# Scroll Monorepo
[![codecov](https://codecov.io/gh/scroll-tech/scroll/branch/develop/graph/badge.svg?token=VJVHNQWGGW)](https://codecov.io/gh/scroll-tech/scroll)
[![Contracts](https://github.com/scroll-tech/scroll/actions/workflows/contracts.yaml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/contracts.yaml) [![Bridge](https://github.com/scroll-tech/scroll/actions/workflows/bridge.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/bridge.yml) [![Coordinator](https://github.com/scroll-tech/scroll/actions/workflows/coordinator.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/coordinator.yml) [![Database](https://github.com/scroll-tech/scroll/actions/workflows/database.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/database.yml) [![Common](https://github.com/scroll-tech/scroll/actions/workflows/common.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/common.yml) [![Roller](https://github.com/scroll-tech/scroll/actions/workflows/roller.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/roller.yml)
## Prerequisites
+ Go 1.19
+ Go 1.18
+ Rust (for version, see [rust-toolchain](./common/libzkp/impl/rust-toolchain))
+ Hardhat / Foundry
+ Docker

View File

@@ -325,26 +325,3 @@ type L2FailedRelayedMessageEvent struct {
type L2RelayedMessageEvent struct {
MessageHash common.Hash
}
// IScrollChainBatch is an auto generated low-level Go binding around an user-defined struct.
type IScrollChainBatch struct {
Blocks []IScrollChainBlockContext
PrevStateRoot common.Hash
NewStateRoot common.Hash
WithdrawTrieRoot common.Hash
BatchIndex uint64
ParentBatchHash common.Hash
L2Transactions []byte
}
// IScrollChainBlockContext is an auto generated low-level Go binding around an user-defined struct.
type IScrollChainBlockContext struct {
BlockHash common.Hash
ParentHash common.Hash
BlockNumber uint64
Timestamp uint64
BaseFee *big.Int
GasLimit uint64
NumTransactions uint16
NumL1Messages uint16
}

View File

@@ -5,7 +5,6 @@ import (
"os"
"github.com/ethereum/go-ethereum/log"
"github.com/iris-contrib/middleware/cors"
"github.com/kataras/iris/v12"
"github.com/kataras/iris/v12/mvc"
"github.com/urfave/cli/v2"
@@ -61,11 +60,6 @@ func init() {
}
func action(ctx *cli.Context) error {
corsOptions := cors.New(cors.Options{
AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "POST", "PUT", "DELETE"},
AllowCredentials: true,
})
// Load config file.
cfgFile := ctx.String(cutils.ConfigFileFlag.Name)
cfg, err := config.NewConfig(cfgFile)
@@ -78,14 +72,13 @@ func action(ctx *cli.Context) error {
}
defer database.Close()
bridgeApp := iris.New()
bridgeApp.UseRouter(corsOptions)
bridgeApp.Get("/ping", pong).Describe("healthcheck")
mvc.Configure(bridgeApp.Party("/api/txs"), setupQueryByAddressHandler)
mvc.Configure(bridgeApp.Party("/api/txsbyhashes"), setupQueryByHashHandler)
// TODO: make debug mode configurable
err = bridgeApp.Listen(cfg.Server.HostPort, iris.WithLogLevel("debug"))
err = bridgeApp.Listen(":8080", iris.WithLogLevel("debug"))
if err != nil {
log.Crit("can not start server", "err", err)
}

View File

@@ -98,14 +98,6 @@ func action(ctx *cli.Context) error {
go l2crossMsgFetcher.Start()
defer l2crossMsgFetcher.Stop()
l1BlocktimeFetcher := cross_msg.NewBlocktimestampFetcher(subCtx, uint(cfg.L1.Confirmation), int(cfg.L1.BlockTime), l1client, db.UpdateL1Blocktimestamp, db.GetL1EarliestNoBlocktimestampHeight)
go l1BlocktimeFetcher.Start()
defer l1BlocktimeFetcher.Stop()
l2BlocktimeFetcher := cross_msg.NewBlocktimestampFetcher(subCtx, uint(cfg.L2.Confirmation), int(cfg.L2.BlockTime), l2client, db.UpdateL2Blocktimestamp, db.GetL2EarliestNoBlocktimestampHeight)
go l2BlocktimeFetcher.Start()
defer l2BlocktimeFetcher.Stop()
// Catch CTRL-C to ensure a graceful shutdown.
interrupt := make(chan os.Signal, 1)
signal.Notify(interrupt, os.Interrupt)

View File

@@ -2,36 +2,33 @@
"l1": {
"confirmation": 64,
"endpoint": "https://rpc.ankr.com/eth_goerli",
"startHeight": 9090194 ,
"startHeight": 8890194 ,
"blockTime": 10,
"MessengerAddr": "0x326517Eb8eB1Ce5eaB5b513C2e9A24839b402d90",
"ETHGatewayAddr": "0x8305cB7B8448677736095965B63d7431017328fe",
"WETHGatewayAddr": "0xe3bA3c60d99a2d9a5f817734bC85353470b23931",
"StandardERC20Gateway": "0x16c1079B27eD9c363B7D08aC5Ae937A398972A5C",
"CustomERC20GatewayAddr": "0x61f08caD3d6F77801167d3bA8669433701586643",
"ERC721GatewayAddr": "0x4A73D25A4C99CB912acaf6C5B5e554f2982201c5",
"ERC1155GatewayAddr": "0xa3F5DD3033698c2832C53f3C3Fe6E062F58cD808"
"MessengerAddr": "0x5260e38080BFe97e6C4925d9209eCc5f964373b6",
"ETHGatewayAddr": "0x429b73A21cF3BF1f3E696a21A95408161daF311f",
"WETHGatewayAddr": "0x8be69E499D8848DfFb4cF9bac909f3e2cF2FeFa0",
"StandardERC20Gateway": "0xeF37207c1A1efF6D6a9d7BfF3cF4270e406d319b",
"CustomERC20GatewayAddr": "0x920f906B814597cF5DC76F95100F09CBAF9c5748",
"ERC721GatewayAddr": "0x1C441Dfc5C2eD7A2AA8636748A664E59CB029157",
"ERC1155GatewayAddr": "0xd1bE599aaCBC21448fD6373bbc7c1b4c7806f135"
},
"l2": {
"confirmation": 1,
"endpoint": "http://staging-l2geth-rpc0.scroll.tech:8545",
"endpoint": "https://alpha-rpc.scroll.io/l2",
"blockTime": 3,
"startHeight": 0,
"CustomERC20GatewayAddr": "0x905db21f836749fEeD12de781afc4A5Ab4Dd0d51",
"ERC721GatewayAddr": "0xC53D835514780664BCd7eCfcE7c2E5d9554dc41B",
"StandardERC20Gateway": "0x90271634BCB020e06ea4840C3f7aa61b8F860651",
"MessengerAddr": "0xE8b0956Ac75c65Aa1669e83888DA13afF2E108f4",
"ETHGatewayAddr": "0xD5938590D5dD8ce95812D4D515a219C12C551D67",
"WETHGatewayAddr": "0xb0aaA582564fade4232a16fdB1383004A6A7247F",
"ERC1155GatewayAddr": "0x4f33B1655619c2C0B7C450128Df760B4365Cb549"
"startHeight": 1600068,
"CustomERC20GatewayAddr": "0xa07Cb742657294C339fB4d5d6CdF3fdBeE8C1c68",
"ERC721GatewayAddr": "0x8Fee20e0C0Ef16f2898a8073531a857D11b9C700",
"StandardERC20Gateway": "0xB878F37BB278bf0e4974856fFe86f5e6F66BD725",
"MessengerAddr": "0xb75d7e84517e1504C151B270255B087Fd746D34C",
"ETHGatewayAddr": "0x32139B5C8838E94fFcD83E60dff95Daa7F0bA14c",
"WETHGatewayAddr": "0xBb88bF582F2BBa46702621dae5CB9271057bC85b",
"ERC1155GatewayAddr": "0x2946cB860028276b3C4bccE1767841641C2E0828"
},
"db": {
"dsn": "postgres://postgres:1234@localhost:5444/test?sslmode=disable",
"driverName": "postgres",
"maxOpenNum": 200,
"maxIdleNum": 20
},
"server": {
"hostPort": "0.0.0.0:20006"
}
}

View File

@@ -30,10 +30,6 @@ type LayerConfig struct {
CustomERC20GatewayAddr string `json:"CustomERC20GatewayAddr"`
}
type ServerConfig struct {
HostPort string `json:"hostPort"`
}
// Config is the configuration of the bridge history backend
type Config struct {
// chain config
@@ -41,8 +37,7 @@ type Config struct {
L2 *LayerConfig `json:"l2"`
// data source name
DB *DBConfig `json:"db"`
Server *ServerConfig `json:"server"`
DB *DBConfig `json:"db"`
}
// NewConfig returns a new instance of Config.

View File

@@ -16,7 +16,7 @@ type QueryHashController struct {
}
func (c *QueryAddressController) Get(req model.QueryByAddressRequest) (*model.QueryByAddressResponse, error) {
message, total, err := c.Service.GetTxsByAddress(common.HexToAddress(req.Address), int64(req.Offset), int64(req.Limit))
message, err := c.Service.GetTxsByAddress(common.HexToAddress(req.Address), int64(req.Offset), int64(req.Limit))
if err != nil {
return &model.QueryByAddressResponse{Message: "500", Data: &model.Data{}}, err
}
@@ -24,7 +24,7 @@ func (c *QueryAddressController) Get(req model.QueryByAddressRequest) (*model.Qu
return &model.QueryByAddressResponse{Message: "ok",
Data: &model.Data{
Result: message,
Total: total,
Total: len(message),
}}, nil
}

View File

@@ -1,79 +0,0 @@
package cross_msg
import (
"context"
"math/big"
"time"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/log"
)
type GetEarliestNoBlocktimestampHeightFunc func() (uint64, error)
type UpdateBlocktimestampFunc func(height uint64, timestamp time.Time) error
type BlocktimestampFetcher struct {
ctx context.Context
confirmation uint
blockTimeInSec int
client *ethclient.Client
updateBlocktimestampFunc UpdateBlocktimestampFunc
getEarliestNoBlocktimestampHeightFunc GetEarliestNoBlocktimestampHeightFunc
}
func NewBlocktimestampFetcher(ctx context.Context, confirmation uint, blockTimeInSec int, client *ethclient.Client, updateBlocktimestampFunc UpdateBlocktimestampFunc, getEarliestNoBlocktimestampHeightFunc GetEarliestNoBlocktimestampHeightFunc) *BlocktimestampFetcher {
return &BlocktimestampFetcher{
ctx: ctx,
confirmation: confirmation,
blockTimeInSec: blockTimeInSec,
client: client,
getEarliestNoBlocktimestampHeightFunc: getEarliestNoBlocktimestampHeightFunc,
updateBlocktimestampFunc: updateBlocktimestampFunc,
}
}
func (b *BlocktimestampFetcher) Start() {
go func() {
tick := time.NewTicker(time.Duration(b.blockTimeInSec) * time.Second)
for {
select {
case <-b.ctx.Done():
tick.Stop()
return
case <-tick.C:
number, err := b.client.BlockNumber(b.ctx)
if err != nil {
log.Error("Can not get latest block number", "err", err)
continue
}
startHeight, err := b.getEarliestNoBlocktimestampHeightFunc()
if err != nil {
log.Error("Can not get latest record without block timestamp", "err", err)
continue
}
for height := startHeight; number >= height+uint64(b.confirmation) && height > 0; {
block, err := b.client.HeaderByNumber(b.ctx, new(big.Int).SetUint64(height))
if err != nil {
log.Error("Can not get block by number", "err", err)
break
}
err = b.updateBlocktimestampFunc(height, time.Unix(int64(block.Time), 0))
if err != nil {
log.Error("Can not update blocktimstamp into DB ", "err", err)
break
}
height, err = b.getEarliestNoBlocktimestampHeightFunc()
if err != nil {
log.Error("Can not get latest record without block timestamp", "err", err)
break
}
}
}
}
}()
}
func (b *BlocktimestampFetcher) Stop() {
log.Info("BlocktimestampFetcher Stop")
b.ctx.Done()
}

View File

@@ -1,6 +1,7 @@
package cross_msg_test
import (
"bridge-history-api/cross_msg"
"crypto/rand"
"math/big"
"testing"
@@ -8,8 +9,6 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/stretchr/testify/assert"
"bridge-history-api/cross_msg"
)
func TestMergeIntoList(t *testing.T) {

View File

@@ -1,190 +0,0 @@
package cross_msg
import (
"github.com/ethereum/go-ethereum/common"
"bridge-history-api/utils"
)
// MaxHeight is the maixium possible height of withdraw trie
const MaxHeight = 40
// WithdrawTrie is an append only merkle trie
type WithdrawTrie struct {
// used to rebuild the merkle tree
NextMessageNonce uint64
height int // current height of withdraw trie
branches []common.Hash
zeroes []common.Hash
}
// NewWithdrawTrie will return a new instance of WithdrawTrie
func NewWithdrawTrie() *WithdrawTrie {
zeroes := make([]common.Hash, MaxHeight)
branches := make([]common.Hash, MaxHeight)
zeroes[0] = common.Hash{}
for i := 1; i < MaxHeight; i++ {
zeroes[i] = utils.Keccak2(zeroes[i-1], zeroes[i-1])
}
return &WithdrawTrie{
zeroes: zeroes,
branches: branches,
height: -1,
NextMessageNonce: 0,
}
}
// Initialize will initialize the merkle trie with rightest leaf node
func (w *WithdrawTrie) Initialize(currentMessageNonce uint64, msgHash common.Hash, proofBytes []byte) {
proof := DecodeBytesToMerkleProof(proofBytes)
branches := RecoverBranchFromProof(proof, currentMessageNonce, msgHash)
w.height = len(proof)
w.branches = branches
w.NextMessageNonce = currentMessageNonce + 1
}
// AppendMessages appends a list of new messages as leaf nodes to the rightest of the tree and returns the proofs for all messages.
func (w *WithdrawTrie) AppendMessages(hashes []common.Hash) [][]byte {
length := len(hashes)
if length == 0 {
return make([][]byte, 0)
}
cache := make([]map[uint64]common.Hash, MaxHeight)
for h := 0; h < MaxHeight; h++ {
cache[h] = make(map[uint64]common.Hash)
}
// cache all branches will be used later.
if w.NextMessageNonce != 0 {
index := w.NextMessageNonce
for h := 0; h <= w.height; h++ {
if index%2 == 1 {
// right child, `w.branches[h]` is the corresponding left child
// the index of left child should be `index ^ 1`.
cache[h][index^1] = w.branches[h]
}
index >>= 1
}
}
// cache all new leaves
for i := 0; i < length; i++ {
cache[0][w.NextMessageNonce+uint64(i)] = hashes[i]
}
// build withdraw trie with new hashes
minIndex := w.NextMessageNonce
maxIndex := w.NextMessageNonce + uint64(length) - 1
for h := 0; maxIndex > 0; h++ {
if minIndex%2 == 1 {
minIndex--
}
if maxIndex%2 == 0 {
cache[h][maxIndex^1] = w.zeroes[h]
}
for i := minIndex; i <= maxIndex; i += 2 {
cache[h+1][i>>1] = utils.Keccak2(cache[h][i], cache[h][i^1])
}
minIndex >>= 1
maxIndex >>= 1
}
// update branches using hashes one by one
for i := 0; i < length; i++ {
proof := UpdateBranchWithNewMessage(w.zeroes, w.branches, w.NextMessageNonce, hashes[i])
w.NextMessageNonce++
w.height = len(proof)
}
proofs := make([][]byte, length)
// retrieve merkle proof from cache
for i := 0; i < length; i++ {
index := w.NextMessageNonce + uint64(i) - uint64(length)
var merkleProof []common.Hash
for h := 0; h < w.height; h++ {
merkleProof = append(merkleProof, cache[h][index^1])
index >>= 1
}
proofs[i] = EncodeMerkleProofToBytes(merkleProof)
}
return proofs
}
// MessageRoot return the current root hash of withdraw trie.
func (w *WithdrawTrie) MessageRoot() common.Hash {
if w.height == -1 {
return common.Hash{}
}
return w.branches[w.height]
}
// DecodeBytesToMerkleProof transfer byte array to bytes32 array. The caller should make sure the length is matched.
func DecodeBytesToMerkleProof(proofBytes []byte) []common.Hash {
proof := make([]common.Hash, len(proofBytes)/32)
for i := 0; i < len(proofBytes); i += 32 {
proof[i/32] = common.BytesToHash(proofBytes[i : i+32])
}
return proof
}
// EncodeMerkleProofToBytes transfer byte32 array to byte array by concatenation.
func EncodeMerkleProofToBytes(proof []common.Hash) []byte {
var proofBytes []byte
for i := 0; i < len(proof); i++ {
proofBytes = append(proofBytes, proof[i][:]...)
}
return proofBytes
}
// UpdateBranchWithNewMessage updates the branches to the latest state with a
// new message appended at the given leaf index, and returns the merkle proof
// for that message. `zeroes[h]` must hold the root of an empty subtree of
// height h; `branches` is mutated in place.
func UpdateBranchWithNewMessage(zeroes []common.Hash, branches []common.Hash, index uint64, msgHash common.Hash) []common.Hash {
	current := msgHash
	var proof []common.Hash
	level := uint64(0)
	for ; index > 0; level++ {
		if index%2 == 1 {
			// Right child: the left sibling was recorded in branches earlier.
			proof = append(proof, branches[level])
			current = utils.Keccak2(branches[level], current)
		} else {
			// Left child: remember this subtree root for a future right
			// sibling; the right side is currently an empty subtree.
			branches[level] = current
			proof = append(proof, zeroes[level])
			current = utils.Keccak2(current, zeroes[level])
		}
		index >>= 1
	}
	branches[level] = current
	return proof
}
// RecoverBranchFromProof recovers the latest branches from a merkle proof and
// a message hash at the given leaf index. 64 levels is enough for any uint64
// nonce; make() zero-initializes every entry, so levels above the recovered
// path are already common.Hash{} — the original trailing re-zeroing loop was
// dead code and has been removed.
func RecoverBranchFromProof(proof []common.Hash, index uint64, msgHash common.Hash) []common.Hash {
	branches := make([]common.Hash, 64)
	root := msgHash
	var height uint64
	for height = 0; index > 0; height++ {
		if index%2 == 0 {
			// Left child: this subtree root is the branch at this level; the
			// proof supplies the (empty) right sibling.
			branches[height] = root
			root = utils.Keccak2(root, proof[height])
		} else {
			// Right child: the proof supplies the left sibling, which is the
			// branch to remember at this level.
			branches[height] = proof[height]
			root = utils.Keccak2(proof[height], root)
		}
		index >>= 1
	}
	branches[height] = root
	return branches
}

View File

@@ -1,213 +0,0 @@
package cross_msg_test
import (
"math/big"
"testing"
"github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/assert"
"bridge-history-api/cross_msg"
"bridge-history-api/utils"
)
// TestUpdateBranchWithNewMessage appends four leaves one at a time and checks
// that the expected subtree root appears in the branches array after each step.
func TestUpdateBranchWithNewMessage(t *testing.T) {
	zeroes := make([]common.Hash, 64)
	branches := make([]common.Hash, 64)
	zeroes[0] = common.Hash{}
	for i := 1; i < 64; i++ {
		zeroes[i] = utils.Keccak2(zeroes[i-1], zeroes[i-1])
	}
	steps := []struct {
		index uint64 // leaf index to append at
		leaf  string // leaf hash to append
		level int    // branches level to inspect afterwards
		want  string // expected branch value at that level
	}{
		{0, "0x0000000000000000000000000000000000000000000000000000000000000001", 0, "0x0000000000000000000000000000000000000000000000000000000000000001"},
		{1, "0x0000000000000000000000000000000000000000000000000000000000000002", 1, "0xe90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0"},
		{2, "0x0000000000000000000000000000000000000000000000000000000000000003", 2, "0x222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c"},
		{3, "0x0000000000000000000000000000000000000000000000000000000000000004", 2, "0xa9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36"},
	}
	for _, step := range steps {
		cross_msg.UpdateBranchWithNewMessage(zeroes, branches, step.index, common.HexToHash(step.leaf))
		if branches[step.level] != common.HexToHash(step.want) {
			t.Fatalf("Invalid root, want %s, got %s", step.want, branches[step.level].Hex())
		}
	}
}
// TestDecodeEncodeMerkleProof checks that DecodeBytesToMerkleProof splits a
// flat byte string into the expected 32-byte hashes and that
// EncodeMerkleProofToBytes round-trips it back to the original bytes.
func TestDecodeEncodeMerkleProof(t *testing.T) {
	encodedHex := "2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d49012ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d49022ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d49032ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4904"
	proof := cross_msg.DecodeBytesToMerkleProof(common.Hex2Bytes(encodedHex))
	if len(proof) != 4 {
		t.Fatalf("proof length mismatch, want %d, got %d", 4, len(proof))
	}
	expected := []string{
		"0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4901",
		"0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4902",
		"0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4903",
		"0x2ebffc1a6671c51e30777a680904b103992630ec995b6e6ff76a04d5259d4904",
	}
	for i, want := range expected {
		// BUG FIX: the original printed proof[0].Hex() in every mismatch
		// message; report the element actually being compared.
		if proof[i] != common.HexToHash(want) {
			t.Fatalf("proof[%d] mismatch, want %s, got %s", i, want, proof[i].Hex())
		}
	}
	bytes := cross_msg.EncodeMerkleProofToBytes(proof)
	if common.Bytes2Hex(bytes) != encodedHex {
		t.Fatalf("wrong encoded bytes")
	}
}
// TestRecoverBranchFromProof appends four leaves in sequence and checks that
// the branches recovered from each returned proof match the live branches.
func TestRecoverBranchFromProof(t *testing.T) {
	zeroes := make([]common.Hash, 64)
	branches := make([]common.Hash, 64)
	zeroes[0] = common.Hash{}
	for i := 1; i < 64; i++ {
		zeroes[i] = utils.Keccak2(zeroes[i-1], zeroes[i-1])
	}
	leaves := []string{
		"0x0000000000000000000000000000000000000000000000000000000000000001",
		"0x0000000000000000000000000000000000000000000000000000000000000002",
		"0x0000000000000000000000000000000000000000000000000000000000000003",
		"0x0000000000000000000000000000000000000000000000000000000000000004",
	}
	for index, leafHex := range leaves {
		leaf := common.HexToHash(leafHex)
		proof := cross_msg.UpdateBranchWithNewMessage(zeroes, branches, uint64(index), leaf)
		recovered := cross_msg.RecoverBranchFromProof(proof, uint64(index), leaf)
		for i := 0; i < 64; i++ {
			if recovered[i] != branches[i] {
				t.Fatalf("Invalid branch, want %s, got %s", branches[i].Hex(), recovered[i].Hex())
			}
		}
	}
}
// TestWithdrawTrieOneByOne appends messages one at a time, starting from every
// initial trie size in [0, 128), and checks after each single-message append
// that the nonce advances, the trie root matches an independent recomputation,
// and the returned proof verifies the new leaf against that root.
func TestWithdrawTrieOneByOne(t *testing.T) {
	for initial := 0; initial < 128; initial++ {
		withdrawTrie := cross_msg.NewWithdrawTrie()
		var hashes []common.Hash
		// Pre-populate the trie with `initial` leaves; proofs from this phase
		// are intentionally not checked.
		for i := 0; i < initial; i++ {
			hash := common.BigToHash(big.NewInt(int64(i + 1)))
			hashes = append(hashes, hash)
			withdrawTrie.AppendMessages([]common.Hash{
				hash,
			})
		}
		// Append the remaining leaves one by one, verifying invariants after
		// each append.
		for i := initial; i < 128; i++ {
			hash := common.BigToHash(big.NewInt(int64(i + 1)))
			hashes = append(hashes, hash)
			// Reference root computed independently of the trie under test.
			expectedRoot := computeMerkleRoot(hashes)
			proofBytes := withdrawTrie.AppendMessages([]common.Hash{
				hash,
			})
			assert.Equal(t, withdrawTrie.NextMessageNonce, uint64(i+1))
			assert.Equal(t, expectedRoot.String(), withdrawTrie.MessageRoot().String())
			// The single returned proof must verify leaf i against the root.
			proof := cross_msg.DecodeBytesToMerkleProof(proofBytes[0])
			verifiedRoot := verifyMerkleProof(uint64(i), hash, proof)
			assert.Equal(t, expectedRoot.String(), verifiedRoot.String())
		}
	}
}
// TestWithdrawTrieMultiple appends messages in batches: for every pair
// (initial, finish) with 0 <= initial <= finish < 100, it seeds a trie with
// `initial` leaves, appends leaves initial..finish in one batch, and checks
// the nonce, the root against precomputed reference roots, and every proof
// returned for the batch.
func TestWithdrawTrieMultiple(t *testing.T) {
	var expectedRoots []common.Hash
	{
		// Precompute the reference root after each prefix of 1..128 leaves.
		var hashes []common.Hash
		for i := 0; i < 128; i++ {
			hash := common.BigToHash(big.NewInt(int64(i + 1)))
			hashes = append(hashes, hash)
			expectedRoots = append(expectedRoots, computeMerkleRoot(hashes))
		}
	}
	for initial := 0; initial < 100; initial++ {
		var hashes []common.Hash
		for i := 0; i < initial; i++ {
			hash := common.BigToHash(big.NewInt(int64(i + 1)))
			hashes = append(hashes, hash)
		}
		for finish := initial; finish < 100; finish++ {
			// Fresh trie seeded with the first `initial` leaves.
			withdrawTrie := cross_msg.NewWithdrawTrie()
			withdrawTrie.AppendMessages(hashes)
			// Batch containing leaves initial..finish inclusive.
			var newHashes []common.Hash
			for i := initial; i <= finish; i++ {
				hash := common.BigToHash(big.NewInt(int64(i + 1)))
				newHashes = append(newHashes, hash)
			}
			proofBytes := withdrawTrie.AppendMessages(newHashes)
			assert.Equal(t, withdrawTrie.NextMessageNonce, uint64(finish+1))
			assert.Equal(t, expectedRoots[finish].String(), withdrawTrie.MessageRoot().String())
			// Every proof in the batch must verify its leaf against the final root.
			for i := initial; i <= finish; i++ {
				hash := common.BigToHash(big.NewInt(int64(i + 1)))
				proof := cross_msg.DecodeBytesToMerkleProof(proofBytes[i-initial])
				verifiedRoot := verifyMerkleProof(uint64(i), hash, proof)
				assert.Equal(t, expectedRoots[finish].String(), verifiedRoot.String())
			}
		}
	}
}
// verifyMerkleProof folds a leaf with its sibling hashes level by level and
// returns the resulting root; index selects left/right ordering at each level.
func verifyMerkleProof(index uint64, leaf common.Hash, proof []common.Hash) common.Hash {
	current := leaf
	for i := 0; i < len(proof); i++ {
		if index&1 == 1 {
			// Odd index: the leaf/subtree is the right child.
			current = utils.Keccak2(proof[i], current)
		} else {
			// Even index: the leaf/subtree is the left child.
			current = utils.Keccak2(current, proof[i])
		}
		index >>= 1
	}
	return current
}
// computeMerkleRoot builds the merkle root bottom-up. Odd-sized levels are
// padded with an empty-subtree hash that doubles (Keccak2 with itself) at
// each level. An empty input yields the zero hash.
func computeMerkleRoot(hashes []common.Hash) common.Hash {
	if len(hashes) == 0 {
		return common.Hash{}
	}
	level := hashes
	padding := common.Hash{}
	for len(level) > 1 {
		next := make([]common.Hash, 0, (len(level)+1)/2)
		for i := 0; i < len(level); i += 2 {
			right := padding
			if i+1 < len(level) {
				right = level[i+1]
			}
			next = append(next, utils.Keccak2(level[i], right))
		}
		level = next
		// The empty-subtree hash for the next level up.
		padding = utils.Keccak2(padding, padding)
	}
	return level[0]
}

View File

@@ -2,24 +2,23 @@
-- +goose StatementBegin
create table cross_message
(
id BIGSERIAL PRIMARY KEY,
msg_hash VARCHAR NOT NULL DEFAULT '',
height BIGINT NOT NULL,
sender VARCHAR NOT NULL,
target VARCHAR NOT NULL,
amount VARCHAR NOT NULL,
layer1_hash VARCHAR NOT NULL DEFAULT '',
layer2_hash VARCHAR NOT NULL DEFAULT '',
layer1_token VARCHAR NOT NULL DEFAULT '',
layer2_token VARCHAR NOT NULL DEFAULT '',
token_id BIGINT NOT NULL DEFAULT 0,
asset SMALLINT NOT NULL,
msg_type SMALLINT NOT NULL,
is_deleted BOOLEAN NOT NULL DEFAULT FALSE,
block_timestamp TIMESTAMP(0) DEFAULT NULL,
created_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMP(0) DEFAULT NULL
id BIGSERIAL PRIMARY KEY,
msg_hash VARCHAR NOT NULL DEFAULT '',
height BIGINT NOT NULL,
sender VARCHAR NOT NULL,
target VARCHAR NOT NULL,
amount VARCHAR NOT NULL,
layer1_hash VARCHAR NOT NULL DEFAULT '',
layer2_hash VARCHAR NOT NULL DEFAULT '',
layer1_token VARCHAR NOT NULL DEFAULT '',
layer2_token VARCHAR NOT NULL DEFAULT '',
token_id BIGINT NOT NULL DEFAULT 0,
asset SMALLINT NOT NULL,
msg_type SMALLINT NOT NULL,
is_deleted BOOLEAN NOT NULL DEFAULT FALSE,
created_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMP(0) DEFAULT NULL
);
comment
@@ -49,20 +48,20 @@ CREATE TRIGGER update_timestamp BEFORE UPDATE
ON cross_message FOR EACH ROW EXECUTE PROCEDURE
update_timestamp();
CREATE OR REPLACE FUNCTION deleted_at_trigger()
CREATE OR REPLACE FUNCTION delete_at_trigger()
RETURNS TRIGGER AS $$
BEGIN
IF NEW.is_deleted AND OLD.is_deleted != NEW.is_deleted THEN
UPDATE cross_message SET deleted_at = NOW() WHERE id = NEW.id;
UPDATE cross_message SET delete_at = NOW() WHERE id = NEW.id;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER deleted_at_trigger
CREATE TRIGGER delete_at_trigger
AFTER UPDATE ON cross_message
FOR EACH ROW
EXECUTE FUNCTION deleted_at_trigger();
EXECUTE FUNCTION delete_at_trigger();
-- +goose StatementEnd

View File

@@ -31,20 +31,20 @@ CREATE TRIGGER update_timestamp BEFORE UPDATE
ON relayed_msg FOR EACH ROW EXECUTE PROCEDURE
update_timestamp();
CREATE OR REPLACE FUNCTION deleted_at_trigger()
CREATE OR REPLACE FUNCTION delete_at_trigger()
RETURNS TRIGGER AS $$
BEGIN
IF NEW.is_deleted AND OLD.is_deleted != NEW.is_deleted THEN
UPDATE relayed_msg SET deleted_at = NOW() WHERE id = NEW.id;
UPDATE relayed_msg SET delete_at = NOW() WHERE id = NEW.id;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER deleted_at_trigger
CREATE TRIGGER delete_at_trigger
AFTER UPDATE ON relayed_msg
FOR EACH ROW
EXECUTE FUNCTION deleted_at_trigger();
EXECUTE FUNCTION delete_at_trigger();
-- +goose StatementEnd

View File

@@ -54,7 +54,6 @@ type CrossMsg struct {
Asset int `json:"asset" db:"asset"`
MsgType int `json:"msg_type" db:"msg_type"`
IsDeleted bool `json:"is_deleted" db:"is_deleted"`
Timestamp *time.Time `json:"timestamp" db:"block_timestamp"`
CreatedAt *time.Time `json:"created_at" db:"created_at"`
UpdatedAt *time.Time `json:"updated_at" db:"updated_at"`
DeletedAt *time.Time `json:"deleted_at" db:"deleted_at"`
@@ -77,8 +76,6 @@ type L1CrossMsgOrm interface {
UpdateL1CrossMsgHash(ctx context.Context, l1Hash, msgHash common.Hash) error
GetLatestL1ProcessedHeight() (int64, error)
DeleteL1CrossMsgAfterHeightDBTx(dbTx *sqlx.Tx, height int64) error
UpdateL1Blocktimestamp(height uint64, timestamp time.Time) error
GetL1EarliestNoBlocktimestampHeight() (uint64, error)
}
// L2CrossMsgOrm provides operations on l2_cross_message table
@@ -91,8 +88,6 @@ type L2CrossMsgOrm interface {
UpdateL2CrossMsgHash(ctx context.Context, l2Hash, msgHash common.Hash) error
GetLatestL2ProcessedHeight() (int64, error)
DeleteL2CrossMsgFromHeightDBTx(dbTx *sqlx.Tx, height int64) error
UpdateL2Blocktimestamp(height uint64, timestamp time.Time) error
GetL2EarliestNoBlocktimestampHeight() (uint64, error)
}
type RelayedMsgOrm interface {

View File

@@ -4,7 +4,6 @@ import (
"context"
"database/sql"
"errors"
"time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/log"
@@ -119,22 +118,3 @@ func (l *l1CrossMsgOrm) DeleteL1CrossMsgAfterHeightDBTx(dbTx *sqlx.Tx, height in
}
return nil
}
func (l *l1CrossMsgOrm) UpdateL1Blocktimestamp(height uint64, timestamp time.Time) error {
if _, err := l.db.Exec(`UPDATE cross_message SET block_timestamp = $1 where height = $2 AND msg_type = $3 AND NOT is_deleted`, timestamp, height, Layer1Msg); err != nil {
return err
}
return nil
}
func (l *l1CrossMsgOrm) GetL1EarliestNoBlocktimestampHeight() (uint64, error) {
row := l.db.QueryRowx(`SELECT height FROM cross_message WHERE block_timestamp IS NULL AND msg_type = $1 AND NOT is_deleted ORDER BY height ASC LIMIT 1;`, Layer1Msg)
var result uint64
if err := row.Scan(&result); err != nil {
if err == sql.ErrNoRows {
return 0, nil
}
return 0, err
}
return result, nil
}

View File

@@ -4,7 +4,6 @@ import (
"context"
"database/sql"
"errors"
"time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/log"
@@ -22,7 +21,7 @@ func NewL2CrossMsgOrm(db *sqlx.DB) L2CrossMsgOrm {
func (l *l2CrossMsgOrm) GetL2CrossMsgByHash(l2Hash common.Hash) (*CrossMsg, error) {
result := &CrossMsg{}
row := l.db.QueryRowx(`SELECT * FROM cross_message WHERE layer2_hash = $1 AND NOT is_deleted;`, l2Hash.String())
row := l.db.QueryRowx(`SELECT * FROM l2_cross_message WHERE layer2_hash = $1 AND NOT is_deleted;`, l2Hash.String())
if err := row.StructScan(result); err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
@@ -122,22 +121,3 @@ func (l *l2CrossMsgOrm) GetLatestL2ProcessedHeight() (int64, error) {
}
return 0, nil
}
func (l *l2CrossMsgOrm) UpdateL2Blocktimestamp(height uint64, timestamp time.Time) error {
if _, err := l.db.Exec(`UPDATE cross_message SET block_timestamp = $1 where height = $2 AND msg_type = $3 AND NOT is_deleted`, timestamp, height, Layer2Msg); err != nil {
return err
}
return nil
}
func (l *l2CrossMsgOrm) GetL2EarliestNoBlocktimestampHeight() (uint64, error) {
row := l.db.QueryRowx(`SELECT height FROM cross_message WHERE block_timestamp IS NULL AND msg_type = $1 AND NOT is_deleted ORDER BY height ASC LIMIT 1;`, Layer2Msg)
var result uint64
if err := row.Scan(&result); err != nil {
if err == sql.ErrNoRows {
return 0, nil
}
return 0, err
}
return result, nil
}

View File

@@ -16,7 +16,6 @@ type OrmFactory interface {
orm.L1CrossMsgOrm
orm.L2CrossMsgOrm
orm.RelayedMsgOrm
GetTotalCrossMsgCountByAddress(sender string) (uint64, error)
GetCrossMsgsByAddressWithOffset(sender string, offset int64, limit int64) ([]*orm.CrossMsg, error)
GetDB() *sqlx.DB
Beginx() (*sqlx.Tx, error)
@@ -60,19 +59,10 @@ func (o *ormFactory) Beginx() (*sqlx.Tx, error) {
return o.DB.Beginx()
}
func (o *ormFactory) GetTotalCrossMsgCountByAddress(sender string) (uint64, error) {
var count uint64
row := o.DB.QueryRowx(`SELECT COUNT(*) FROM cross_message WHERE sender = $1 AND NOT is_deleted;`, sender)
if err := row.Scan(&count); err != nil {
return 0, err
}
return count, nil
}
func (o *ormFactory) GetCrossMsgsByAddressWithOffset(sender string, offset int64, limit int64) ([]*orm.CrossMsg, error) {
para := sender
var results []*orm.CrossMsg
rows, err := o.DB.Queryx(`SELECT * FROM cross_message WHERE sender = $1 AND NOT is_deleted ORDER BY block_timestamp DESC NULLS FIRST, id DESC LIMIT $2 OFFSET $3;`, para, limit, offset)
rows, err := o.DB.Queryx(`SELECT * FROM cross_message WHERE sender = $1 AND NOT is_deleted ORDER BY id DESC LIMIT $2 OFFSET $3;`, para, limit, offset)
if err != nil || rows == nil {
return nil, err
}

View File

@@ -1,10 +1,9 @@
module bridge-history-api
go 1.19
go 1.20
require (
github.com/ethereum/go-ethereum v1.12.0
github.com/iris-contrib/middleware/cors v0.0.0-20230531125531-980d3a09a458
github.com/ethereum/go-ethereum v1.11.6
github.com/jmoiron/sqlx v1.3.5
github.com/kataras/iris/v12 v12.2.0
github.com/lib/pq v1.10.7
@@ -27,7 +26,6 @@ require (
github.com/andybalholm/brotli v1.0.5 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.7.0 // indirect
github.com/blang/semver/v4 v4.0.0 // indirect
github.com/btcsuite/btcd v0.20.1-beta // indirect
github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect
@@ -36,17 +34,14 @@ require (
github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect
github.com/cockroachdb/pebble v0.0.0-20230209160836-829675f94811 // indirect
github.com/cockroachdb/redact v1.1.3 // indirect
github.com/consensys/bavard v0.1.13 // indirect
github.com/consensys/gnark-crypto v0.10.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
github.com/crate-crypto/go-kzg-4844 v0.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/deckarep/golang-set/v2 v2.1.0 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 // indirect
github.com/deepmap/oapi-codegen v1.8.2 // indirect
github.com/docker/docker v20.10.21+incompatible // indirect
github.com/edsrzf/mmap-go v1.0.0 // indirect
github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385 // indirect
github.com/ethereum/c-kzg-4844 v0.2.0 // indirect
github.com/fatih/structs v1.1.0 // indirect
github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5 // indirect
github.com/flosch/pongo2/v4 v4.0.2 // indirect
@@ -97,7 +92,6 @@ require (
github.com/microcosm-cc/bluemonday v1.0.23 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/pointerstructure v1.2.0 // indirect
github.com/mmcloughlin/addchain v0.4.0 // indirect
github.com/nats-io/nats.go v1.23.0 // indirect
github.com/nats-io/nkeys v0.3.0 // indirect
github.com/nats-io/nuid v1.0.1 // indirect
@@ -121,7 +115,6 @@ require (
github.com/shirou/gopsutil v3.21.11+incompatible // indirect
github.com/sirupsen/logrus v1.9.0 // indirect
github.com/status-im/keycard-go v0.2.0 // indirect
github.com/supranational/blst v0.3.11-0.20230406105308-e9dfc5ee724b // indirect
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect
github.com/tdewolff/minify/v2 v2.12.4 // indirect
github.com/tdewolff/parse/v2 v2.6.4 // indirect
@@ -149,6 +142,5 @@ require (
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
rsc.io/tmplfunc v0.0.3 // indirect
)

View File

@@ -32,8 +32,6 @@ github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd3
github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bits-and-blooms/bitset v1.7.0 h1:YjAGVd3XmtK9ktAbX8Zg2g2PwLIMjGREZJHlV4j7NEo=
github.com/bits-and-blooms/bitset v1.7.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
github.com/btcsuite/btcd v0.20.1-beta h1:Ik4hyJqN8Jfyv3S4AGBOmyouMsYE3EdYODkMbQjwPGw=
@@ -67,18 +65,12 @@ github.com/cockroachdb/pebble v0.0.0-20230209160836-829675f94811/go.mod h1:Nb5lg
github.com/cockroachdb/redact v1.1.3 h1:AKZds10rFSIj7qADf0g46UixK8NNLwWTNdCIGS5wfSQ=
github.com/cockroachdb/redact v1.1.3/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg=
github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM=
github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ=
github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI=
github.com/consensys/gnark-crypto v0.10.0 h1:zRh22SR7o4K35SoNqouS9J/TKHTyU2QWaj5ldehyXtA=
github.com/consensys/gnark-crypto v0.10.0/go.mod h1:Iq/P3HHl0ElSjsg2E1gsMwhAyxnxoKK5nVyZKd+/KhU=
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/crate-crypto/go-kzg-4844 v0.2.0 h1:UVuHOE+5tIWrim4zf/Xaa43+MIsDCPyW76QhUpiMGj4=
github.com/crate-crypto/go-kzg-4844 v0.2.0/go.mod h1:SBP7ikXEgDnUPONgm33HtuDZEDtWa3L4QtN1ocJSEQ4=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4=
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -100,6 +92,8 @@ github.com/djherbis/atime v1.1.0/go.mod h1:28OF6Y8s3NQWwacXc5eZTsEsiMzp7LF8MbXE+
github.com/docker/docker v20.10.21+incompatible h1:UTLdBmHk3bEY+w8qeO5KttOhy6OmXWsl/FEet9Uswog=
github.com/docker/docker v20.10.21+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/edsrzf/mmap-go v1.0.0 h1:CEBF7HpRnUCSJgGUb5h1Gm7e3VkmVDrR8lvWVLtrOFw=
github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385 h1:clC1lXBpe2kTj2VHdaIu9ajZQe4kcEY9j0NsnDDBZ3o=
github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -107,10 +101,8 @@ github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.m
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw=
github.com/ethereum/c-kzg-4844 v0.2.0 h1:+cUvymlnoDDQgMInp25Bo3OmLajmmY8mLJ/tLjqd77Q=
github.com/ethereum/c-kzg-4844 v0.2.0/go.mod h1:WI2Nd82DMZAAZI1wV2neKGost9EKjvbpQR9OqE5Qqa8=
github.com/ethereum/go-ethereum v1.12.0 h1:bdnhLPtqETd4m3mS8BGMNvBTf36bO5bx/hxE2zljOa0=
github.com/ethereum/go-ethereum v1.12.0/go.mod h1:/oo2X/dZLJjf2mJ6YT9wcWxa4nNJDBKDBU6sFIpx1Gs=
github.com/ethereum/go-ethereum v1.11.6 h1:2VF8Mf7XiSUfmoNOy3D+ocfl9Qu8baQBrCNbo2CXQ8E=
github.com/ethereum/go-ethereum v1.11.6/go.mod h1:+a8pUj1tOyJ2RinsNQD4326YS+leSoKGiG/uVVb0x6Y=
github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8=
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
@@ -201,7 +193,6 @@ github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -242,8 +233,6 @@ github.com/iris-contrib/go.uuid v2.0.0+incompatible h1:XZubAYg61/JwnJNbZilGjf3b3
github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/+fafWORmlnuysV2EMP8MW+qe0=
github.com/iris-contrib/httpexpect/v2 v2.12.1 h1:3cTZSyBBen/kfjCtgNFoUKi1u0FVXNaAjyRJOo6AVS4=
github.com/iris-contrib/jade v1.1.3/go.mod h1:H/geBymxJhShH5kecoiOCSssPX7QWYH7UaeZTSWddIk=
github.com/iris-contrib/middleware/cors v0.0.0-20230531125531-980d3a09a458 h1:V60rHQJc6DieKV1BqHIGclraPdO4kinuFAZIrPGHN7s=
github.com/iris-contrib/middleware/cors v0.0.0-20230531125531-980d3a09a458/go.mod h1:7eVziAp1yUwFB/ZMg71n84VWQH+7wukvxcHuF2e7cbg=
github.com/iris-contrib/pongo2 v0.0.1/go.mod h1:Ssh+00+3GAZqSQb30AvBRNxBx7rf0GqwkjqxNd0u65g=
github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw=
github.com/iris-contrib/schema v0.0.6 h1:CPSBLyx2e91H2yJzPuhGuifVRnZBBJ3pCOMbOvPZaTw=
@@ -301,7 +290,6 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0
github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg=
github.com/labstack/echo/v4 v4.5.0/go.mod h1:czIriw4a0C1dFun+ObrXp7ok03xON0N1awStJ6ArI7Y=
github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=
github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw=
github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
@@ -353,9 +341,6 @@ github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyua
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/pointerstructure v1.2.0 h1:O+i9nHnXS3l/9Wu7r4NrEdwA2VFTicjUEN1uBnDo34A=
github.com/mitchellh/pointerstructure v1.2.0/go.mod h1:BRAsLI5zgXmw97Lf6s25bs8ohIXc3tViBH44KcwB2g4=
github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY=
github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU=
github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
@@ -468,8 +453,6 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/supranational/blst v0.3.11-0.20230406105308-e9dfc5ee724b h1:u49mjRnygnB34h8OKbnNJFVUtWSKIKb1KukdV8bILUM=
github.com/supranational/blst v0.3.11-0.20230406105308-e9dfc5ee724b/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw=
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY=
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc=
github.com/tdewolff/minify/v2 v2.12.4 h1:kejsHQMM17n6/gwdw53qsi6lg0TGddZADVyQOz1KMdE=
@@ -737,5 +720,3 @@ modernc.org/sqlite v1.18.1 h1:ko32eKt3jf7eqIkCgPAeHMBXw3riNSLhl2f3loEF7o8=
modernc.org/strutil v1.1.2 h1:iFBDH6j1Z0bN/Q9udJnnFoFpENA4252qe/7/5woE5MI=
modernc.org/token v1.0.0 h1:a0jaWiNMDhDUtqOj09wvjWWAqd3q7WpBulmL9H2egsk=
moul.io/http2curl/v2 v2.3.0 h1:9r3JfDzWPcbIklMOs2TnIFzDYvfAZvjeavG6EzP7jYs=
rsc.io/tmplfunc v0.0.3 h1:53XFQh69AfOa8Tw0Jm7t+GV7KZhOi6jzsCzTtKbMvzU=
rsc.io/tmplfunc v0.0.3/go.mod h1:AG3sTPzElb1Io3Yg4voV9AGZJuleGAwaVRxL9M49PhA=

View File

@@ -4,7 +4,7 @@ import "bridge-history-api/service"
type Data struct {
Result []*service.TxHistoryInfo `json:"result"`
Total uint64 `json:"total"`
Total int `json:"total"`
}
type QueryByAddressResponse struct {

View File

@@ -32,7 +32,7 @@ type TxHistoryInfo struct {
// HistoryService example service.
type HistoryService interface {
GetTxsByAddress(address common.Address, offset int64, limit int64) ([]*TxHistoryInfo, uint64, error)
GetTxsByAddress(address common.Address, offset int64, limit int64) ([]*TxHistoryInfo, error)
GetTxsByHashes(hashes []string) ([]*TxHistoryInfo, error)
}
@@ -69,25 +69,20 @@ func updateCrossTxHash(msgHash string, txInfo *TxHistoryInfo, db db.OrmFactory)
}
func (h *historyBackend) GetTxsByAddress(address common.Address, offset int64, limit int64) ([]*TxHistoryInfo, uint64, error) {
var txHistories []*TxHistoryInfo
total, err := h.db.GetTotalCrossMsgCountByAddress(address.String())
if err != nil || total == 0 {
return txHistories, 0, err
}
func (h *historyBackend) GetTxsByAddress(address common.Address, offset int64, limit int64) ([]*TxHistoryInfo, error) {
txHistories := make([]*TxHistoryInfo, 0)
result, err := h.db.GetCrossMsgsByAddressWithOffset(address.String(), offset, limit)
if err != nil {
return nil, 0, err
return nil, err
}
for _, msg := range result {
txHistory := &TxHistoryInfo{
Hash: msg.Layer1Hash + msg.Layer2Hash,
Amount: msg.Amount,
To: msg.Target,
IsL1: msg.MsgType == int(orm.Layer1Msg),
BlockNumber: msg.Height,
BlockTimestamp: msg.Timestamp,
CreatedAt: msg.CreatedAt,
Hash: msg.MsgHash,
Amount: msg.Amount,
To: msg.Target,
IsL1: msg.MsgType == int(orm.Layer1Msg),
BlockNumber: msg.Height,
CreatedAt: msg.CreatedAt,
FinalizeTx: &Finalized{
Hash: "",
},
@@ -95,7 +90,7 @@ func (h *historyBackend) GetTxsByAddress(address common.Address, offset int64, l
updateCrossTxHash(msg.MsgHash, txHistory, h.db)
txHistories = append(txHistories, txHistory)
}
return txHistories, total, nil
return txHistories, nil
}
func (h *historyBackend) GetTxsByHashes(hashes []string) ([]*TxHistoryInfo, error) {
@@ -107,13 +102,12 @@ func (h *historyBackend) GetTxsByHashes(hashes []string) ([]*TxHistoryInfo, erro
}
if l1result != nil {
txHistory := &TxHistoryInfo{
Hash: l1result.Layer1Hash,
Amount: l1result.Amount,
To: l1result.Target,
IsL1: true,
BlockNumber: l1result.Height,
BlockTimestamp: l1result.Timestamp,
CreatedAt: l1result.CreatedAt,
Hash: l1result.Layer1Hash,
Amount: l1result.Amount,
To: l1result.Target,
IsL1: true,
BlockNumber: l1result.Height,
CreatedAt: l1result.CreatedAt,
FinalizeTx: &Finalized{
Hash: "",
},
@@ -128,13 +122,12 @@ func (h *historyBackend) GetTxsByHashes(hashes []string) ([]*TxHistoryInfo, erro
}
if l2result != nil {
txHistory := &TxHistoryInfo{
Hash: l2result.Layer2Hash,
Amount: l2result.Amount,
To: l2result.Target,
IsL1: false,
BlockNumber: l2result.Height,
BlockTimestamp: l2result.Timestamp,
CreatedAt: l2result.CreatedAt,
Hash: l2result.Layer2Hash,
Amount: l2result.Amount,
To: l2result.Target,
IsL1: false,
BlockNumber: l2result.Height,
CreatedAt: l2result.CreatedAt,
FinalizeTx: &Finalized{
Hash: "",
},

View File

@@ -1,9 +1,7 @@
package utils
import (
"bytes"
"context"
"errors"
"fmt"
"math/big"
@@ -16,11 +14,6 @@ import (
backendabi "bridge-history-api/abi"
)
// Keccak2 compute the keccack256 of two concatenations of bytes32
func Keccak2(a common.Hash, b common.Hash) common.Hash {
return common.BytesToHash(crypto.Keccak256(append(a.Bytes()[:], b.Bytes()[:]...)))
}
func GetSafeBlockNumber(ctx context.Context, client *ethclient.Client, confirmations uint64) (uint64, error) {
number, err := client.BlockNumber(ctx)
if err != nil || number <= confirmations {
@@ -61,48 +54,3 @@ func ComputeMessageHash(
data, _ := backendabi.L2ScrollMessengerABI.Pack("relayMessage", sender, target, value, messageNonce, message)
return common.BytesToHash(crypto.Keccak256(data))
}
// GetBatchRangeFromCalldataV1 find the block range from calldata, both inclusive.
func GetBatchRangeFromCalldataV1(calldata []byte) ([]uint64, []uint64, []uint64, error) {
var batchIndices []uint64
var startBlocks []uint64
var finishBlocks []uint64
if bytes.Equal(calldata[0:4], common.Hex2Bytes("cb905499")) {
// commitBatches
method := backendabi.ScrollChainABI.Methods["commitBatches"]
values, err := method.Inputs.Unpack(calldata[4:])
if err != nil {
return batchIndices, startBlocks, finishBlocks, err
}
args := make([]backendabi.IScrollChainBatch, len(values))
err = method.Inputs.Copy(&args, values)
if err != nil {
return batchIndices, startBlocks, finishBlocks, err
}
for i := 0; i < len(args); i++ {
batchIndices = append(batchIndices, args[i].BatchIndex)
startBlocks = append(startBlocks, args[i].Blocks[0].BlockNumber)
finishBlocks = append(finishBlocks, args[i].Blocks[len(args[i].Blocks)-1].BlockNumber)
}
} else if bytes.Equal(calldata[0:4], common.Hex2Bytes("8c73235d")) {
// commitBatch
method := backendabi.ScrollChainABI.Methods["commitBatch"]
values, err := method.Inputs.Unpack(calldata[4:])
if err != nil {
return batchIndices, startBlocks, finishBlocks, err
}
args := backendabi.IScrollChainBatch{}
err = method.Inputs.Copy(&args, values)
if err != nil {
return batchIndices, startBlocks, finishBlocks, err
}
batchIndices = append(batchIndices, args.BatchIndex)
startBlocks = append(startBlocks, args.Blocks[0].BlockNumber)
finishBlocks = append(finishBlocks, args.Blocks[len(args.Blocks)-1].BlockNumber)
} else {
return batchIndices, startBlocks, finishBlocks, errors.New("invalid selector")
}
return batchIndices, startBlocks, finishBlocks, nil
}

View File

@@ -1,48 +0,0 @@
package utils_test
import (
"os"
"testing"
"github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/assert"
"bridge-history-api/utils"
)
func TestKeccak2(t *testing.T) {
a := common.HexToHash("0xe90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0")
b := common.HexToHash("0x222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c")
c := utils.Keccak2(a, b)
assert.NotEmpty(t, c)
assert.NotEqual(t, a, c)
assert.NotEqual(t, b, c)
assert.Equal(t, "0xc0ffbd7f501bd3d49721b0724b2bff657cb2378f15d5a9b97cd7ea5bf630d512", c.Hex())
}
func TestGetBatchRangeFromCalldataV1(t *testing.T) {
calldata, err := os.ReadFile("../testdata/commit-batches-0x3095e91db7ba4a6fbf4654d607db322e58ff5579c502219c8024acaea74cf311.txt")
assert.NoError(t, err)
// multiple batches
batchIndices, startBlocks, finishBlocks, err := utils.GetBatchRangeFromCalldataV1(common.Hex2Bytes(string(calldata[:])))
assert.NoError(t, err)
assert.Equal(t, len(batchIndices), 5)
assert.Equal(t, len(startBlocks), 5)
assert.Equal(t, len(finishBlocks), 5)
assert.Equal(t, batchIndices[0], uint64(1))
assert.Equal(t, batchIndices[1], uint64(2))
assert.Equal(t, batchIndices[2], uint64(3))
assert.Equal(t, batchIndices[3], uint64(4))
assert.Equal(t, batchIndices[4], uint64(5))
assert.Equal(t, startBlocks[0], uint64(1))
assert.Equal(t, startBlocks[1], uint64(6))
assert.Equal(t, startBlocks[2], uint64(7))
assert.Equal(t, startBlocks[3], uint64(19))
assert.Equal(t, startBlocks[4], uint64(20))
assert.Equal(t, finishBlocks[0], uint64(5))
assert.Equal(t, finishBlocks[1], uint64(6))
assert.Equal(t, finishBlocks[2], uint64(18))
assert.Equal(t, finishBlocks[3], uint64(19))
assert.Equal(t, finishBlocks[4], uint64(20))
}

View File

@@ -80,7 +80,6 @@
"batch_commit_time_sec": 1200,
"batch_blocks_limit": 100,
"commit_tx_calldata_size_limit": 200000,
"commit_tx_batch_count_limit": 30,
"public_input_config": {
"max_tx_num": 44,
"padding_tx_hash": "0x0000000000000000000000000000000000000000000000000000000000000000"

View File

@@ -1,6 +1,6 @@
module scroll-tech/bridge
go 1.19
go 1.18
require (
github.com/agiledragon/gomonkey/v2 v2.9.0

View File

@@ -44,8 +44,6 @@ type BatchProposerConfig struct {
CommitTxCalldataSizeLimit uint64 `json:"commit_tx_calldata_size_limit"`
// Commit tx calldata min size limit in bytes
CommitTxCalldataMinSize uint64 `json:"commit_tx_calldata_min_size,omitempty"`
// Max number of batches in a commit transaction
CommitTxBatchCountLimit uint64 `json:"commit_tx_batch_count_limit"`
// The public input hash config
PublicInputConfig *types.PublicInputHashConfig `json:"public_input_config"`
}

View File

@@ -46,7 +46,6 @@ type BatchProposer struct {
commitCalldataSizeLimit uint64
batchDataBufferSizeLimit uint64
commitCalldataMinSize uint64
commitBatchCountLimit int
proofGenerationFreq uint64
batchDataBuffer []*bridgeTypes.BatchData
@@ -73,7 +72,6 @@ func NewBatchProposer(ctx context.Context, cfg *config.BatchProposerConfig, rela
batchCommitTimeSec: cfg.BatchCommitTimeSec,
commitCalldataSizeLimit: cfg.CommitTxCalldataSizeLimit,
commitCalldataMinSize: cfg.CommitTxCalldataMinSize,
commitBatchCountLimit: int(cfg.CommitTxBatchCountLimit),
batchDataBufferSizeLimit: 100*cfg.CommitTxCalldataSizeLimit + 1*1024*1024, // @todo: determine the value.
proofGenerationFreq: cfg.ProofGenerationFreq,
piCfg: cfg.PublicInputConfig,
@@ -204,7 +202,7 @@ func (p *BatchProposer) TryCommitBatches() {
index := 0
commit := false
calldataByteLen := uint64(0)
for ; index < len(p.batchDataBuffer) && index < p.commitBatchCountLimit; index++ {
for ; index < len(p.batchDataBuffer); index++ {
calldataByteLen += bridgeAbi.GetBatchCalldataLength(&p.batchDataBuffer[index].Batch)
if calldataByteLen > p.commitCalldataSizeLimit {
commit = true

View File

@@ -113,12 +113,11 @@ func testBatchProposerBatchGeneration(t *testing.T) {
assert.NoError(t, err)
proposer := NewBatchProposer(context.Background(), &config.BatchProposerConfig{
ProofGenerationFreq: 1,
BatchGasThreshold: 3000000,
BatchTxNumThreshold: 135,
BatchTimeSec: 1,
BatchBlocksLimit: 100,
CommitTxBatchCountLimit: 30,
ProofGenerationFreq: 1,
BatchGasThreshold: 3000000,
BatchTxNumThreshold: 135,
BatchTimeSec: 1,
BatchBlocksLimit: 100,
}, relayer, db)
proposer.TryProposeBatch()
@@ -190,12 +189,11 @@ func testBatchProposerGracefulRestart(t *testing.T) {
assert.Equal(t, batchData2.Hash().Hex(), batchHashes[0])
// test p.recoverBatchDataBuffer().
_ = NewBatchProposer(context.Background(), &config.BatchProposerConfig{
ProofGenerationFreq: 1,
BatchGasThreshold: 3000000,
BatchTxNumThreshold: 135,
BatchTimeSec: 1,
BatchBlocksLimit: 100,
CommitTxBatchCountLimit: 30,
ProofGenerationFreq: 1,
BatchGasThreshold: 3000000,
BatchTxNumThreshold: 135,
BatchTimeSec: 1,
BatchBlocksLimit: 100,
}, relayer, db)
batchHashes, err = blockBatchOrm.GetBlockBatchesHashByRollupStatus(types.RollupPending, math.MaxInt32)

View File

@@ -236,7 +236,6 @@ func prepareAuth(t *testing.T, l2Cli *ethclient.Client, privateKey *ecdsa.Privat
assert.NoError(t, err)
auth.GasPrice, err = l2Cli.SuggestGasPrice(context.Background())
assert.NoError(t, err)
auth.GasLimit = 500000
return auth
}

View File

@@ -1,46 +1,45 @@
ifeq ($(GO_VERSION),)
GO_VERSION=1.19
endif
ifeq ($(RUST_VERSION),)
RUST_VERSION=nightly-2022-12-10
endif
ifeq ($(PYTHON_VERSION),)
PYTHON_VERSION=3.10
endif
ifeq ($(CUDA_VERSION),)
CUDA_VERSION=11.7.1
endif
GO_VERSION := 1.18
PYTHON_VERSION := 3.10
RUST_VERSION := nightly-2022-12-10
.PHONY: all go-alpine-builder rust-builder rust-alpine-builder go-rust-alpine-builder go-rust-builder cuda-go-rust-builder py-runner
cuda-go-rust-builder:
docker build -t scrolltech/cuda-go-rust-builder:cuda-$(CUDA_VERSION)-go-$(GO_VERSION)-rust-$(RUST_VERSION) -f cuda-go-rust-builder.Dockerfile ./ --build-arg CUDA_VERSION=$(CUDA_VERSION) --build-arg GO_VERSION=$(GO_VERSION) --build-arg RUST_VERSION=$(RUST_VERSION)
.PHONY: all go-alpine-builder rust-builder rust-alpine-builder go-rust-alpine-builder go-rust-builder py-runner
go-rust-builder:
docker build -t scrolltech/go-rust-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION) -f go-rust-builder.Dockerfile ./ --build-arg GO_VERSION=$(GO_VERSION) --build-arg RUST_VERSION=$(RUST_VERSION)
docker build -t scrolltech/go-rust-builder:latest -f go-rust-builder.Dockerfile ./
docker image tag scrolltech/go-rust-builder:latest scrolltech/go-rust-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION)
go-alpine-builder:
docker build -t scrolltech/go-alpine-builder:$(GO_VERSION) -f go-alpine-builder.Dockerfile ./ --build-arg GO_VERSION=$(GO_VERSION)
docker build -t scrolltech/go-alpine-builder:latest -f go-alpine-builder.Dockerfile ./
docker image tag scrolltech/go-alpine-builder:latest scrolltech/go-alpine-builder:$(GO_VERSION)
rust-builder:
docker build -t scrolltech/rust-builder:$(RUST_VERSION) -f rust-builder.Dockerfile ./ --build-arg RUST_VERSION=$(RUST_VERSION)
docker build -t scrolltech/rust-builder:latest -f rust-builder.Dockerfile ./
docker image tag scrolltech/rust-builder:latest scrolltech/rust-builder:$(RUST_VERSION)
rust-alpine-builder:
docker build -t scrolltech/rust-alpine-builder:$(RUST_VERSION) -f rust-alpine-builder.Dockerfile ./ --build-arg RUST_VERSION=$(RUST_VERSION)
docker build -t scrolltech/rust-alpine-builder:latest -f rust-alpine-builder.Dockerfile ./
docker image tag scrolltech/rust-alpine-builder:latest scrolltech/rust-alpine-builder:$(RUST_VERSION)
go-rust-alpine-builder:
docker build -t scrolltech/go-rust-alpine-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION) -f go-rust-alpine-builder.Dockerfile ./ --build-arg GO_VERSION=$(GO_VERSION) --build-arg RUST_VERSION=$(RUST_VERSION)
docker build -t scrolltech/go-rust-alpine-builder:latest -f go-rust-alpine-builder.Dockerfile ./
docker image tag scrolltech/go-rust-alpine-builder:latest scrolltech/go-rust-alpine-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION)
py-runner:
docker build -t scrolltech/py-runner:$(PYTHON_VERSION) -f py-runner.Dockerfile ./ --build-arg PYTHON_VERSION=$(PYTHON_VERSION)
docker build -t scrolltech/py-runner:latest -f py-runner.Dockerfile ./
docker image tag scrolltech/py-runner:latest scrolltech/py-runner:$(PYTHON_VERSION)
all: go-alpine-builder rust-builder rust-alpine-builder go-rust-alpine-builder go-rust-builder cuda-go-rust-builder py-runner
all: go-alpine-builder rust-builder rust-alpine-builder go-rust-alpine-builder go-rust-builder py-runner
publish:
docker push scrolltech/go-alpine-builder:latest
docker push scrolltech/go-alpine-builder:$(GO_VERSION)
docker push scrolltech/rust-builder:latest
docker push scrolltech/rust-builder:$(RUST_VERSION)
docker push scrolltech/rust-alpine-builder:latest
docker push scrolltech/rust-alpine-builder:$(RUST_VERSION)
docker push scrolltech/go-rust-alpine-builder:latest
docker push scrolltech/go-rust-alpine-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION)
docker push scrolltech/go-rust-builder:latest
docker push scrolltech/go-rust-builder:go-$(GO_VERSION)-rust-$(RUST_VERSION)
docker push scrolltech/cuda-go-rust-builder:cuda-$(CUDA_VERSION)-go-$(GO_VERSION)-rust-$(RUST_VERSION)
docker push scrolltech/py-runner:latest
docker push scrolltech/py-runner:$(PYTHON_VERSION)

View File

@@ -1,35 +0,0 @@
ARG CUDA_VERSION=11.7.1
ARG GO_VERSION=1.19
ARG RUST_VERSION=nightly-2022-12-10
ARG CARGO_CHEF_TAG=0.1.41
FROM nvidia/cuda:${CUDA_VERSION}-devel-ubuntu22.04
RUN apt-get update
# Install basic packages
RUN apt-get install build-essential curl wget git pkg-config --no-install-recommends -y
# Install dev-packages
RUN apt-get install libclang-dev libssl-dev cmake llvm --no-install-recommends -y
# Install related libs
RUN apt install libprocps-dev libboost-all-dev libmpfr-dev libgmp-dev --no-install-recommends -y
# Clean installed cache
RUN rm -rf /var/lib/apt/lists/*
# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
ENV CARGO_HOME=/root/.cargo
# Add Toolchain
ARG RUST_VERSION
RUN rustup toolchain install ${RUST_VERSION}
ARG CARGO_CHEF_TAG
RUN cargo install cargo-chef --locked --version ${CARGO_CHEF_TAG} \
&& rm -rf $CARGO_HOME/registry/
# Install Go
ARG GO_VERSION
RUN rm -rf /usr/local/go
RUN wget https://go.dev/dl/go${GO_VERSION}.1.linux-amd64.tar.gz
RUN tar -C /usr/local -xzf go${GO_VERSION}.1.linux-amd64.tar.gz
RUN rm go${GO_VERSION}.1.linux-amd64.tar.gz
ENV PATH="/usr/local/go/bin:${PATH}"

View File

@@ -1,6 +1,4 @@
ARG GO_VERSION=1.19
FROM golang:${GO_VERSION}-alpine
FROM golang:1.18-alpine
# ENV GOPROXY https://goproxy.cn,direct

View File

@@ -1,8 +1,6 @@
ARG GO_VERSION=1.19
ARG RUST_VERSION=nightly-2022-12-10
FROM golang:1.18-alpine
ARG CARGO_CHEF_TAG=0.1.41
FROM golang:${GO_VERSION}-alpine
ARG DEFAULT_RUST_TOOLCHAIN=nightly-2022-12-10
RUN apk add --no-cache gcc musl-dev linux-headers git ca-certificates openssl-dev
@@ -26,14 +24,12 @@ RUN set -eux; \
wget "$url"; \
chmod +x rustup-init;
ARG RUST_VERSION
RUN ./rustup-init -y --no-modify-path --default-toolchain ${RUST_VERSION}; \
RUN ./rustup-init -y --no-modify-path --default-toolchain ${DEFAULT_RUST_TOOLCHAIN}; \
rm rustup-init; \
chmod -R a+w $RUSTUP_HOME $CARGO_HOME; \
rustup --version; \
cargo --version; \
rustc --version;
ARG CARGO_CHEF_TAG
RUN cargo install cargo-chef --locked --version ${CARGO_CHEF_TAG} \
&& rm -rf $CARGO_HOME/registry/

View File

@@ -1,13 +1,10 @@
ARG GO_VERSION=1.19
ARG RUST_VERSION=nightly-2022-12-10
ARG CARGO_CHEF_TAG=0.1.41
FROM ubuntu:20.04
RUN apt-get update && ln -fs /usr/share/zoneinfo/America/New_York /etc/localtime
# Install basic packages
RUN apt-get install build-essential curl wget git pkg-config -y
# Install dev-packages
RUN apt-get install libclang-dev libssl-dev llvm -y
@@ -15,17 +12,23 @@ RUN apt-get install libclang-dev libssl-dev llvm -y
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
ENV CARGO_HOME=/root/.cargo
# Add Toolchain
ARG RUST_VERSION
RUN rustup toolchain install ${RUST_VERSION}
ARG CARGO_CHEF_TAG
RUN rustup toolchain install nightly-2022-12-10
# TODO: make this ARG
ENV CARGO_CHEF_TAG=0.1.41
RUN cargo install cargo-chef --locked --version ${CARGO_CHEF_TAG} \
&& rm -rf $CARGO_HOME/registry/
# Install Go
ARG GO_VERSION
RUN rm -rf /usr/local/go
RUN wget https://go.dev/dl/go${GO_VERSION}.1.linux-amd64.tar.gz
RUN tar -C /usr/local -xzf go${GO_VERSION}.1.linux-amd64.tar.gz
RUN rm go${GO_VERSION}.1.linux-amd64.tar.gz
# for 1.17
# RUN wget https://go.dev/dl/go1.17.13.linux-amd64.tar.gz
# RUN tar -C /usr/local -xzf go1.17.13.linux-amd64.tar.gz
# for 1.18
RUN wget https://go.dev/dl/go1.18.9.linux-amd64.tar.gz
RUN tar -C /usr/local -xzf go1.18.9.linux-amd64.tar.gz
ENV PATH="/usr/local/go/bin:${PATH}"

View File

@@ -1,4 +1,3 @@
ARG PYTHON_VERSION=3.10
FROM python:${PYTHON_VERSION}-alpine
FROM python:3.10-alpine3.15
RUN apk add --no-cache gcc g++ make musl-dev

View File

@@ -1,8 +1,7 @@
ARG ALPINE_VERSION=3.15
ARG RUST_VERSION=nightly-2022-12-10
ARG CARGO_CHEF_TAG=0.1.41
FROM alpine:${ALPINE_VERSION}
ARG CARGO_CHEF_TAG=0.1.41
ARG DEFAULT_RUST_TOOLCHAIN=nightly-2022-12-10
RUN apk add --no-cache \
ca-certificates \
@@ -28,14 +27,12 @@ RUN set -eux; \
wget "$url"; \
chmod +x rustup-init;
ARG RUST_VERSION
RUN ./rustup-init -y --no-modify-path --default-toolchain ${RUST_VERSION}; \
RUN ./rustup-init -y --no-modify-path --default-toolchain ${DEFAULT_RUST_TOOLCHAIN}; \
rm rustup-init; \
chmod -R a+w $RUSTUP_HOME $CARGO_HOME; \
rustup --version; \
cargo --version; \
rustc --version;
ARG CARGO_CHEF_TAG
RUN cargo install cargo-chef --locked --version ${CARGO_CHEF_TAG} \
&& rm -rf $CARGO_HOME/registry/

View File

@@ -1,6 +1,3 @@
ARG RUST_VERSION=nightly-2022-12-10
ARG CARGO_CHEF_TAG=0.1.41
FROM ubuntu:20.04
RUN apt-get update && ln -fs /usr/share/zoneinfo/America/New_York /etc/localtime
@@ -14,10 +11,6 @@ RUN apt-get install libclang-dev libssl-dev llvm -y
# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
ENV CARGO_HOME=/root/.cargo
# Add Toolchain
ARG RUST_VERSION
RUN rustup toolchain install ${RUST_VERSION}
ARG CARGO_CHEF_TAG
RUN cargo install cargo-chef --locked --version ${CARGO_CHEF_TAG} \
&& rm -rf $CARGO_HOME/registry/
RUN rustup toolchain install nightly-2022-12-10

View File

@@ -15,7 +15,7 @@ import (
const (
// GolangCIVersion to be used for linting.
GolangCIVersion = "github.com/golangci/golangci-lint/cmd/golangci-lint@v1.52.2"
GolangCIVersion = "github.com/golangci/golangci-lint/cmd/golangci-lint@v1.50.0"
)
// GOBIN environment variable.

View File

@@ -0,0 +1,14 @@
#!/bin/bash
set -uex
${GOROOT}/bin/bin/gocover-cobertura < coverage.bridge.txt > coverage.bridge.xml
${GOROOT}/bin/bin/gocover-cobertura < coverage.db.txt > coverage.db.xml
${GOROOT}/bin/bin/gocover-cobertura < coverage.common.txt > coverage.common.xml
${GOROOT}/bin/bin/gocover-cobertura < coverage.coordinator.txt > coverage.coordinator.xml
#${GOROOT}/bin/bin/gocover-cobertura < coverage.integration.txt > coverage.integration.xml
npx cobertura-merge -o cobertura.xml \
package1=coverage.bridge.xml \
package2=coverage.db.xml \
package3=coverage.common.xml \
package4=coverage.coordinator.xml
# package5=coverage.integration.xml

View File

@@ -0,0 +1,85 @@
imagePrefix = 'scrolltech'
credentialDocker = 'dockerhub'
TAGNAME = ''
pipeline {
agent any
options {
timeout (20)
}
tools {
go 'go-1.18'
nodejs "nodejs"
}
environment {
GO111MODULE = 'on'
PATH="/home/ubuntu/.cargo/bin:$PATH"
// LOG_DOCKER = 'true'
}
stages {
stage('Tag') {
steps {
script {
TAGNAME = sh(returnStdout: true, script: 'git tag -l --points-at HEAD')
sh "echo ${TAGNAME}"
// ...
}
}
}
stage('Build') {
environment {
// Extract the username and password of our credentials into "DOCKER_CREDENTIALS_USR" and "DOCKER_CREDENTIALS_PSW".
// (NOTE 1: DOCKER_CREDENTIALS will be set to "your_username:your_password".)
// The new variables will always be YOUR_VARIABLE_NAME + _USR and _PSW.
// (NOTE 2: You can't print credentials in the pipeline for security reasons.)
DOCKER_CREDENTIALS = credentials('dockerhub')
}
steps {
withCredentials([usernamePassword(credentialsId: "${credentialDocker}", passwordVariable: 'dockerPassword', usernameVariable: 'dockerUser')]) {
// Use a scripted pipeline.
script {
stage('Push image') {
if (TAGNAME == ""){
return;
}
sh "docker login --username=$dockerUser --password=$dockerPassword"
catchError(buildResult: 'SUCCESS', stageResult: 'SUCCESS') {
script {
try {
sh "docker manifest inspect scrolltech/bridge:$TAGNAME > /dev/null"
} catch (e) {
// only build if the tag non existed
//sh "docker login --username=${dockerUser} --password=${dockerPassword}"
sh "make -C bridge docker"
sh "docker tag scrolltech/bridge:latest scrolltech/bridge:${TAGNAME}"
sh "docker push scrolltech/bridge:${TAGNAME}"
throw e
}
}
}
catchError(buildResult: 'SUCCESS', stageResult: 'SUCCESS') {
script {
try {
sh "docker manifest inspect scrolltech/coordinator:$TAGNAME > /dev/null"
} catch (e) {
// only build if the tag non existed
//sh "docker login --username=${dockerUser} --password=${dockerPassword}"
sh "make -C coordinator docker"
sh "docker tag scrolltech/coordinator:latest scrolltech/coordinator:${TAGNAME}"
sh "docker push scrolltech/coordinator:${TAGNAME}"
throw e
}
}
}
}
}
}
}
}
}
post {
always {
cleanWs()
slackSend(message: "${JOB_BASE_NAME} ${GIT_COMMIT} #${TAGNAME} Tag build ${currentBuild.result}")
}
}
}

View File

@@ -1,38 +0,0 @@
coverage:
status:
project: off
patch: off
flag_management:
default_rules:
carryforward: true
individual_flags:
- name: bridge
statuses:
- type: project
target: auto
threshold: 1%
- name: bridge-history-api
statuses:
- type: project
target: auto
threshold: 1%
- name: common
statuses:
- type: project
target: auto
threshold: 1%
- name: coordinator
statuses:
- type: project
target: auto
threshold: 1%
- name: database
statuses:
- type: project
target: auto
threshold: 1%
- name: roller
statuses:
- type: project
target: auto
threshold: 1%

View File

@@ -1,6 +1,6 @@
module scroll-tech/common
go 1.19
go 1.18
require (
github.com/docker/docker v20.10.21+incompatible

View File

@@ -29,7 +29,7 @@
{
"type": 2,
"nonce": 2,
"txHash": "0x6b50040f5f14bad253f202b0775d6742131bcaee6b992f05578386f00e53b7e4",
"txHash": "0xaaaeb971adfac989c7db5426737bc2932756091a5730ea6d5324f93e4cff9713",
"gas": 1152994,
"gasPrice": "0x3b9b0a17",
"from": "0x1c5a77d9fa7ef466951b2f01f724bca3a5820b63",

View File

@@ -1,868 +0,0 @@
{
"coinbase": {
"address": "0x5300000000000000000000000000000000000005",
"nonce": 0,
"balance": "0x2aa86921dcd2c0",
"keccakCodeHash": "0x256e306f068f0847c8aab5819879b2ff45c021ce2e2f428be51be663415b1d60",
"poseidonCodeHash": "0x2c49d7de76e39008575f2f090bb3e90912bad475ea8102c8565c249a75575df5",
"codeSize": 1652
},
"header": {
"parentHash": "0xe761181afb179bc4e6848ecc4e32af82c0eeff4aca77024f985d1dffb0ba0013",
"sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
"miner": "0x0000000000000000000000000000000000000000",
"stateRoot": "0x155c42b3ffa9b88987b02bc8f89fb31f2b555bb8bff971d6fcd92e04a144c248",
"transactionsRoot": "0x891f5907147c83867e1e7b200b9d26fb43c3c08f81202d04235c84a2aa79f72f",
"receiptsRoot": "0x7ad169feb178baf74f7c0a12a28570bd69bd10e616acad2caea09a55fd1fb541",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"difficulty": "0x2",
"number": "0xd",
"gasLimit": "0x7a1200",
"gasUsed": "0x5dc0",
"timestamp": "0x646b6e13",
"extraData": "0xd983030201846765746889676f312e31382e3130856c696e7578000000000000f942387d5a3dba7786280b806f022e2afaec53939149ac7b132b4ef1cf5cdf393d688543d984ae15b1896185ea13f9e7ae18b22b65e5ffec9128195d7cde6fa700",
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"nonce": "0x0000000000000000",
"baseFeePerGas": null,
"hash": "0x09f75bc27efe18cd77a82491370442ea5a6066e910b73dc99fe1caff950c357b"
},
"transactions": [
{
"type": 126,
"nonce": 10,
"txHash": "0xed6dff31c5516b3b9d169781865276cf27501aadd45c131bf8c841c5e619e56a",
"gas": 24000,
"gasPrice": "0x0",
"from": "0x478cdd110520a8e733e2acf9e543d2c687ea5239",
"to": "0x1a258d17bf244c4df02d40343a7626a9d321e105",
"chainId": "0x0",
"value": "0x0",
"data": "0x8ef1332e000000000000000000000000ea08a65b1829af779261e768d609e59279b510f2000000000000000000000000f2ec6b6206f6208e8f9b394efc1a01c1cbde77750000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000a4232e87480000000000000000000000002b5ad5c4795c026514f8317c7a215e218dccd6cf0000000000000000000000002b5ad5c4795c026514f8317c7a215e218dccd6cf00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"isCreate": false,
"v": "0x0",
"r": "0x0",
"s": "0x0"
},
{
"type": 0,
"nonce": 11,
"txHash": "0xed6dff31c5516b3b9d169781865276cf27501aadd45c131bf8c841c5e619e56a",
"gas": 24000,
"gasPrice": "0x0",
"from": "0x478cdd110520a8e733e2acf9e543d2c687ea5239",
"to": "0x1a258d17bf244c4df02d40343a7626a9d321e105",
"chainId": "0x0",
"value": "0x0",
"data": "0x",
"isCreate": false,
"v": "0x0",
"r": "0x0",
"s": "0x0"
}
],
"storageTrace": {
"rootBefore": "0x16d403e1c55dee3e020457262414ee7a20596922c08cac631385d8ea6d6c2c2b",
"rootAfter": "0x155c42b3ffa9b88987b02bc8f89fb31f2b555bb8bff971d6fcd92e04a144c248",
"proofs": {
"0x1a258d17bF244C4dF02d40343a7626A9D321e105": [
"0x000f2d6436a450dc3daf4f111527f3e187a9641e7c5cbc4f53a386e6e4114bb8202cc33de5af63f5deca2409302103a4523463a3a16529835d526795e8966079db",
"0x0029ce00b3e5ddca3bd22d3a923b95239ed11243363803b8e1f5a89fb37ee3c6e52c0d8469864d5ee8e0d62944e8dc1de68f78b094d3ef7cf72a21b372866bab0a",
"0x001dcee8089ea21f679f1af199cc93ccb35fdea1257b9ffeac0ae5c89654a0dbce20790d9030fd3f822620f7395f1af3ca53789e7451f811c2364f2b4fa19be9fd",
"0x000d62fbf3a623b87d67d8f97132a8f1759360c03c1b78ea3654238eb6c72fd5dd0742c02437cc0294c49133a28968ba1f913963d9c2892254da675958cd4a4b2e",
"0x0026875849a967c3af8bbd7ac6efb4ef8250efaee44c8bd85ac026d541c7f509ac18ae138a98367696a39f7abe0a53fd3b32283fa843bdc4a2485d65b3b9651670",
"0x0125375fd5ae821cd3e835e2fba4ae79971635b7288d549ba8ba66bea36603686c05080000000000000000000000000000000000000000000000000867000000000000000130644e72e131a029b85045b68181585d2833e84879b9705b0e1847ce1160000030221b0e9cf191ce544dcc5c8927fd08af82cb88be110d9533468ffd2d575aed31f2125c021fb94759cb1993a2f07eae01792311e13f209441ff8969cf1eb8351cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c2201a258d17bf244c4df02d40343a7626a9d321e105000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x478CDd110520a8e733e2ACF9e543d2c687EA5239": [
"0x000f2d6436a450dc3daf4f111527f3e187a9641e7c5cbc4f53a386e6e4114bb8202cc33de5af63f5deca2409302103a4523463a3a16529835d526795e8966079db",
"0x0029ce00b3e5ddca3bd22d3a923b95239ed11243363803b8e1f5a89fb37ee3c6e52c0d8469864d5ee8e0d62944e8dc1de68f78b094d3ef7cf72a21b372866bab0a",
"0x000bf7d923da6cc335d4074262981bf4615b43a8eb2a4dd6f2eda4fd8e1503d9311c4e63762bb10044749243a2b52db21797da53a89ba6b8ceb5cee1596150ac45",
"0x002b29daef215b12b331bf75a98e595b8a10a91928f479cca3562db3859315055a1cb697055013d78d58072071584b3e40e8d846948c8e829cbbe9915e4bcf08f0",
"0x00000000000000000000000000000000000000000000000000000000000000000007b1a84d4b19493ba2ca6a59dbc42d0e8559a7f8fb0c066bb8b1d90ceee9ce5c",
"0x0000000000000000000000000000000000000000000000000000000000000000000e9e173703b7c89f67443e861d959df35575c16617ea238fd235d8612f9020ba",
"0x0000000000000000000000000000000000000000000000000000000000000000000ea71dd32b28e075772420197e740ad0ed7990e3f6e5be7f5051f0c0709defce",
"0x000000000000000000000000000000000000000000000000000000000000000000186f00dca57567f28233cef5140efd49b1624b0ec3aef5b7f7ee42f03c3b6231",
"0x0006aac99418e9b09baea374df117e64523910d04427251eec6a9b482b6433bc54186c0fb6b2462a9c851df47ab11054dac43ed5b3f9d8d8a5fcf2fd0f9eb3e147",
"0x0109c2edb6138e8d6dc8f0b8b5ae98dd721c7053061887757f6749c484bddf92fa05080000000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a4702098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b6486420478cdd110520a8e733e2acf9e543d2c687ea5239000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x5300000000000000000000000000000000000000": [
"0x000f2d6436a450dc3daf4f111527f3e187a9641e7c5cbc4f53a386e6e4114bb8202cc33de5af63f5deca2409302103a4523463a3a16529835d526795e8966079db",
"0x0029ce00b3e5ddca3bd22d3a923b95239ed11243363803b8e1f5a89fb37ee3c6e52c0d8469864d5ee8e0d62944e8dc1de68f78b094d3ef7cf72a21b372866bab0a",
"0x001dcee8089ea21f679f1af199cc93ccb35fdea1257b9ffeac0ae5c89654a0dbce20790d9030fd3f822620f7395f1af3ca53789e7451f811c2364f2b4fa19be9fd",
"0x000d62fbf3a623b87d67d8f97132a8f1759360c03c1b78ea3654238eb6c72fd5dd0742c02437cc0294c49133a28968ba1f913963d9c2892254da675958cd4a4b2e",
"0x0026875849a967c3af8bbd7ac6efb4ef8250efaee44c8bd85ac026d541c7f509ac18ae138a98367696a39f7abe0a53fd3b32283fa843bdc4a2485d65b3b9651670",
"0x000a3197466e4643551413444b60bbf8ab0ced04566326492fdf1993586eec3fe10000000000000000000000000000000000000000000000000000000000000000",
"0x002143f0cbad38f9696bb9c0be84281e5b517a06983edef7c75485b7a06473c97921dd9af8de7aade9fba53909b1a98ae938236ceec8ba6346ba3ba75c039194d7",
"0x0115d04fcf1fe3d9a4cc7a76b70fafcd7b9304b42108af39d9e500be391563775c0508000000000000000000000000000000000000000000000000064d000000000000000000000000000000000000000000000000000000000000000000000000000000002908ab50d1edc9dac80a344f44731acf807809c545e3388816b97a9882b5d4f974ae902ff6a84825a9cde7cc5f26e8c414e88139716c3423ed908f0a60c996011c70d94e9dc7c85d39f6877b01e59a87c057882957d9fd16c55025dfdcaa4d93205300000000000000000000000000000000000000000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x5300000000000000000000000000000000000002": [
"0x000f2d6436a450dc3daf4f111527f3e187a9641e7c5cbc4f53a386e6e4114bb8202cc33de5af63f5deca2409302103a4523463a3a16529835d526795e8966079db",
"0x000150eaa497ee8904a3d2dc8350c03963fb1786ea5253d5cc16f321afcd862cee107e99fa497bffacfb8ab50a44b93c9a74bc7c669323c7fbd0560a657342c55a",
"0x000877a6983a09f78254ca94a086eb673296f5583aa33855bfbdbe6d2fadf0ff0107b2e01ad456a3ec4c88478c604ad6a15c6fb572259e49ef4cc781940fe1375e",
"0x0013b6a97296cf294d19f634904a7fa973d9714b90cc42e0456ad428b7278f338e0accad868d7f4aaa755b29eae6ad523415a9df210ffced28d7d33fa6d5a319b3",
"0x0011de0e672d258d43c785592fc939bc105441bafc9c1455901723358b0a73d5cc29562af63a2293f036058180ce56f5269c6a3d4d18d8e1dc75ef03cb8f51f8b9",
"0x01236b0ff4611519fb52869dd99bedcb730ebe17544687c5064da49f42f741831d05080000000000000000000000000000000000000000000000000873000000000000000000000000000000000000000000000000000000000000000000000000000000001bd955d4ef171429eb11fade67006376e84bf94630ddb9b9948c3f385ce0f05aa48c68219d344cebd30fca18d0777f587e55052ae6161c88fa4c16407211ddaa0d39d683afa3720f93c44224e2b95a5871a5a2207b5323f7fbf8f1862120ba90205300000000000000000000000000000000000002000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x5300000000000000000000000000000000000005": [
"0x000f2d6436a450dc3daf4f111527f3e187a9641e7c5cbc4f53a386e6e4114bb8202cc33de5af63f5deca2409302103a4523463a3a16529835d526795e8966079db",
"0x0029ce00b3e5ddca3bd22d3a923b95239ed11243363803b8e1f5a89fb37ee3c6e52c0d8469864d5ee8e0d62944e8dc1de68f78b094d3ef7cf72a21b372866bab0a",
"0x000bf7d923da6cc335d4074262981bf4615b43a8eb2a4dd6f2eda4fd8e1503d9311c4e63762bb10044749243a2b52db21797da53a89ba6b8ceb5cee1596150ac45",
"0x002b29daef215b12b331bf75a98e595b8a10a91928f479cca3562db3859315055a1cb697055013d78d58072071584b3e40e8d846948c8e829cbbe9915e4bcf08f0",
"0x011facf302b106912bccc8194dff4cb12139e7f04288d3f5eefb57ccf4d842ba22050800000000000000000000000000000000000000000000000006740000000000000000000000000000000000000000000000000000000000000000002aa86921dcd2c018f4988204e816e17e42d9f9a2a468d8ca70ad453a88d3e371a0d9f743b799a6256e306f068f0847c8aab5819879b2ff45c021ce2e2f428be51be663415b1d602c49d7de76e39008575f2f090bb3e90912bad475ea8102c8565c249a75575df5205300000000000000000000000000000000000005000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
]
},
"storageProofs": {
"0x1a258d17bF244C4dF02d40343a7626A9D321e105": {
"0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103": [
"0x001914b8a8cb4d4339d89ed1d5e6cd54ec609082fdf42fadb2d4101f3214f2a2290a1746dfbdf492c00e2854b46eda6adad88ad1b0583997db4121cb7d8e6de5ca",
"0x00084878451370def5a5648862c037adb6ae24f29b9237a1823638ca29d573bdd42446af3926a42a7e8b65f9a5fdd5a00e82e4f2b9684816fdc5d52c238bef604a",
"0x00027f6e365685a83e63cde58e13d22b99c130a578178f8198d755171a2ff97bf303e187b8ea9652424a9d9dac9bc16796838b196f141c6db57136643f22b48468",
"0x00149dad479c283104bb461dcce598d82aacff80a5844d863d8f64e0d3f3e83b1a0000000000000000000000000000000000000000000000000000000000000000",
"0x001f232429e01853a7456bc8bb4cbc3a35c132f7783e2b300306bceb64a44ce81e0000000000000000000000000000000000000000000000000000000000000000",
"0x0027e1c425d61d4468534c93b8aa80c34bbdea9ec2d69df7a730ecacf0089b22640000000000000000000000000000000000000000000000000000000000000000",
"0x001f4bdfdda0df475064a0ea35302dddc6401b8c93afad9a7569afb9f2534750560000000000000000000000000000000000000000000000000000000000000000",
"0x0001fc65caf9a60abae81bcb17c4854fa114100528e73ab1e649fac03ed9fa764e304459eb829e92aa3009534c4eba916b2900783c694385d2e7f87004e7649215",
"0x01249c7b39f739f430be8e1e2cae0f1db06dfe2f8d4cc631d312d5b98efb3e7402010100000000000000000000000000008eebfef33eb00149852cadb631838ad9bfcce84820b53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
]
},
"0x5300000000000000000000000000000000000000": {
"0x0000000000000000000000000000000000000000000000000000000000000000": [
"0x0004f706d28ba7344cc73128f383e7f4df4c79f296a56e1bbc24cdfab5bc4cba5c2a970eaf68f6e47243e30bea39087adc0082afa5fd55fc5537baccd03f786953",
"0x00296af6438bc81ff661ef6d1bb16d33d6784e88ae39ff28258e56e4e72d5607052bb61b23d947a704c29df01936e7c557bf9ec541243566a336b43f8aeca37eed",
"0x001750ff1780c9b253cfcbd6274a4f79f3a95819e0856c31f0a6025e30ac3a5b261b73cc5623d88d2687f0fa6006bc823149c779b9e751477a6f2b83773062ddbe",
"0x0004c8c2bf27ee6712f4175555679ff662b9423a1d7205fe31e77999106cfb5a2f0efef64a4ef3d151d1364174e0e72745aeee51bf93fb17f8071e6daf4571a736",
"0x001de6dfed408db1b0cf580652da17c9277834302d9ee2c39ab074675ca61fd9e02ea58d0958b74734329987e16d8afa4d83a7acc46417a7f7dbc1fd42e305b394",
"0x001dd3e7dce636d92fdb4dd8b65cb4e5b8ffd3d64e54a51d93a527826bb1ec3a480000000000000000000000000000000000000000000000000000000000000000",
"0x02",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
]
},
"0x5300000000000000000000000000000000000002": {
"0x0000000000000000000000000000000000000000000000000000000000000001": [
"0x00024a2d3ee220db30dece4b39c0cffc2ba97ddded52a3f2da3aeed1f485d0a7220000000000000000000000000000000000000000000000000000000000000000",
"0x001da3cd3096ffd62c95bad392eedc1c578e7ccf248898c49c5ed82abb49a4b31a2b63c0d58a64939cf9026618503b904e267eeb0e465e15812b85485e81fb856c",
"0x01232927899d46fea05cc897a4f4671f808aa83c4eaf89396dfab15480fee91e8e010100000000000000000000000000005300000000000000000000000000000000000003200000000000000000000000000000000000000000000000000000000000000004",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x0000000000000000000000000000000000000000000000000000000000000002": [
"0x00024a2d3ee220db30dece4b39c0cffc2ba97ddded52a3f2da3aeed1f485d0a7220000000000000000000000000000000000000000000000000000000000000000",
"0x001da3cd3096ffd62c95bad392eedc1c578e7ccf248898c49c5ed82abb49a4b31a2b63c0d58a64939cf9026618503b904e267eeb0e465e15812b85485e81fb856c",
"0x012098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864010100000000000000000000000000006f4c950442e1af093bcff730381e63ae9171b87a200000000000000000000000000000000000000000000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
],
"0x0000000000000000000000000000000000000000000000000000000000000003": [
"0x00024a2d3ee220db30dece4b39c0cffc2ba97ddded52a3f2da3aeed1f485d0a7220000000000000000000000000000000000000000000000000000000000000000",
"0x001da3cd3096ffd62c95bad392eedc1c578e7ccf248898c49c5ed82abb49a4b31a2b63c0d58a64939cf9026618503b904e267eeb0e465e15812b85485e81fb856c",
"0x012098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864010100000000000000000000000000006f4c950442e1af093bcff730381e63ae9171b87a200000000000000000000000000000000000000000000000000000000000000000",
"0x5448495320495320534f4d45204d4147494320425954455320464f5220534d54206d3172525867503278704449"
]
}
}
},
"executionResults": [
{
"gas": 24000,
"failed": true,
"returnValue": "",
"from": {
"address": "0x478cdd110520a8e733e2acf9e543d2c687ea5239",
"nonce": 10,
"balance": "0x0",
"keccakCodeHash": "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
"poseidonCodeHash": "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
"codeSize": 0
},
"to": {
"address": "0x1a258d17bf244c4df02d40343a7626a9d321e105",
"nonce": 1,
"balance": "0x30644e72e131a029b85045b68181585d2833e84879b9705b0e1847ce11600000",
"keccakCodeHash": "0x31f2125c021fb94759cb1993a2f07eae01792311e13f209441ff8969cf1eb835",
"poseidonCodeHash": "0x1cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c2",
"codeSize": 2151
},
"accountAfter": [
{
"address": "0x478cdd110520a8e733e2acf9e543d2c687ea5239",
"nonce": 11,
"balance": "0x0",
"keccakCodeHash": "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
"poseidonCodeHash": "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
"codeSize": 0
},
{
"address": "0x1a258d17bf244c4df02d40343a7626a9d321e105",
"nonce": 1,
"balance": "0x30644e72e131a029b85045b68181585d2833e84879b9705b0e1847ce11600000",
"keccakCodeHash": "0x31f2125c021fb94759cb1993a2f07eae01792311e13f209441ff8969cf1eb835",
"poseidonCodeHash": "0x1cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c2",
"codeSize": 2151
},
{
"address": "0x5300000000000000000000000000000000000005",
"nonce": 0,
"balance": "0x2aa86921dcd2c0",
"keccakCodeHash": "0x256e306f068f0847c8aab5819879b2ff45c021ce2e2f428be51be663415b1d60",
"poseidonCodeHash": "0x2c49d7de76e39008575f2f090bb3e90912bad475ea8102c8565c249a75575df5",
"codeSize": 1652
}
],
"poseidonCodeHash": "0x1cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c2",
"byteCode": "0x60806040526004361061004e5760003560e01c80633659cfe6146100655780634f1ef286146100855780635c60da1b146100985780638f283970146100c9578063f851a440146100e95761005d565b3661005d5761005b6100fe565b005b61005b6100fe565b34801561007157600080fd5b5061005b6100803660046106f1565b610118565b61005b61009336600461070c565b61015f565b3480156100a457600080fd5b506100ad6101d0565b6040516001600160a01b03909116815260200160405180910390f35b3480156100d557600080fd5b5061005b6100e43660046106f1565b61020b565b3480156100f557600080fd5b506100ad610235565b61010661029b565b61011661011161033a565b610344565b565b610120610368565b6001600160a01b0316336001600160a01b03161415610157576101548160405180602001604052806000815250600061039b565b50565b6101546100fe565b610167610368565b6001600160a01b0316336001600160a01b031614156101c8576101c38383838080601f0160208091040260200160405190810160405280939291908181526020018383808284376000920191909152506001925061039b915050565b505050565b6101c36100fe565b60006101da610368565b6001600160a01b0316336001600160a01b03161415610200576101fb61033a565b905090565b6102086100fe565b90565b610213610368565b6001600160a01b0316336001600160a01b0316141561015757610154816103c6565b600061023f610368565b6001600160a01b0316336001600160a01b03161415610200576101fb610368565b6060610285838360405180606001604052806027815260200161080b6027913961041a565b9392505050565b6001600160a01b03163b151590565b6102a3610368565b6001600160a01b0316336001600160a01b031614156101165760405162461bcd60e51b815260206004820152604260248201527f5472616e73706172656e745570677261646561626c6550726f78793a2061646d60448201527f696e2063616e6e6f742066616c6c6261636b20746f2070726f78792074617267606482015261195d60f21b608482015260a4015b60405180910390fd5b60006101fb6104f7565b3660008037600080366000845af43d6000803e808015610363573d6000f35b3d6000fd5b60007fb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d61035b546001600160a01b0316919050565b6103a48361051f565b6000825111806103b15750805b156101c3576103c08383610260565b50505050565b7f7e644d79422f17c01e4894b5f4f588d331ebfa28653d42ae8
32dc59e38c9798f6103ef610368565b604080516001600160a01b03928316815291841660208301520160405180910390a16101548161055f565b60606001600160a01b0384163b6104825760405162461bcd60e51b815260206004820152602660248201527f416464726573733a2064656c65676174652063616c6c20746f206e6f6e2d636f6044820152651b9d1c9858dd60d21b6064820152608401610331565b600080856001600160a01b03168560405161049d91906107bb565b600060405180830381855af49150503d80600081146104d8576040519150601f19603f3d011682016040523d82523d6000602084013e6104dd565b606091505b50915091506104ed828286610608565b9695505050505050565b60007f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc61038c565b61052881610641565b6040516001600160a01b038216907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b90600090a250565b6001600160a01b0381166105c45760405162461bcd60e51b815260206004820152602660248201527f455243313936373a206e65772061646d696e20697320746865207a65726f206160448201526564647265737360d01b6064820152608401610331565b807fb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d61035b80546001600160a01b0319166001600160a01b039290921691909117905550565b60608315610617575081610285565b8251156106275782518084602001fd5b8160405162461bcd60e51b815260040161033191906107d7565b6001600160a01b0381163b6106ae5760405162461bcd60e51b815260206004820152602d60248201527f455243313936373a206e657720696d706c656d656e746174696f6e206973206e60448201526c1bdd08184818dbdb9d1c9858dd609a1b6064820152608401610331565b807f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc6105e7565b80356001600160a01b03811681146106ec57600080fd5b919050565b60006020828403121561070357600080fd5b610285826106d5565b60008060006040848603121561072157600080fd5b61072a846106d5565b9250602084013567ffffffffffffffff8082111561074757600080fd5b818601915086601f83011261075b57600080fd5b81358181111561076a57600080fd5b87602082850101111561077c57600080fd5b6020830194508093505050509250925092565b60005b838110156107aa578181015183820152602001610792565b838111156103c05750506000910152565b600082516107cd81846020870
161078f565b9190910192915050565b60208152600082518060208401526107f681604085016020870161078f565b601f01601f1916919091016040019291505056fe416464726573733a206c6f772d6c6576656c2064656c65676174652063616c6c206661696c6564a2646970667358221220366737524a7ac8fa76e3b2cd04bb1e0b8aa75e165c32f59b0076ead59d529de564736f6c634300080a0033",
"structLogs": [
{
"pc": 0,
"op": "PUSH1",
"gas": 320,
"gasCost": 3,
"depth": 1
},
{
"pc": 2,
"op": "PUSH1",
"gas": 317,
"gasCost": 3,
"depth": 1,
"stack": [
"0x80"
]
},
{
"pc": 4,
"op": "MSTORE",
"gas": 314,
"gasCost": 12,
"depth": 1,
"stack": [
"0x80",
"0x40"
]
},
{
"pc": 5,
"op": "PUSH1",
"gas": 302,
"gasCost": 3,
"depth": 1
},
{
"pc": 7,
"op": "CALLDATASIZE",
"gas": 299,
"gasCost": 2,
"depth": 1,
"stack": [
"0x4"
]
},
{
"pc": 8,
"op": "LT",
"gas": 297,
"gasCost": 3,
"depth": 1,
"stack": [
"0x4",
"0x184"
]
},
{
"pc": 9,
"op": "PUSH2",
"gas": 294,
"gasCost": 3,
"depth": 1,
"stack": [
"0x0"
]
},
{
"pc": 12,
"op": "JUMPI",
"gas": 291,
"gasCost": 10,
"depth": 1,
"stack": [
"0x0",
"0x4e"
]
},
{
"pc": 13,
"op": "PUSH1",
"gas": 281,
"gasCost": 3,
"depth": 1
},
{
"pc": 15,
"op": "CALLDATALOAD",
"gas": 278,
"gasCost": 3,
"depth": 1,
"stack": [
"0x0"
]
},
{
"pc": 16,
"op": "PUSH1",
"gas": 275,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e000000000000000000000000ea08a65b1829af779261e768d609e592"
]
},
{
"pc": 18,
"op": "SHR",
"gas": 272,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e000000000000000000000000ea08a65b1829af779261e768d609e592",
"0xe0"
]
},
{
"pc": 19,
"op": "DUP1",
"gas": 269,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 20,
"op": "PUSH4",
"gas": 266,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e"
]
},
{
"pc": 25,
"op": "EQ",
"gas": 263,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e",
"0x3659cfe6"
]
},
{
"pc": 26,
"op": "PUSH2",
"gas": 260,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0"
]
},
{
"pc": 29,
"op": "JUMPI",
"gas": 257,
"gasCost": 10,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0",
"0x65"
]
},
{
"pc": 30,
"op": "DUP1",
"gas": 247,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 31,
"op": "PUSH4",
"gas": 244,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e"
]
},
{
"pc": 36,
"op": "EQ",
"gas": 241,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e",
"0x4f1ef286"
]
},
{
"pc": 37,
"op": "PUSH2",
"gas": 238,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0"
]
},
{
"pc": 40,
"op": "JUMPI",
"gas": 235,
"gasCost": 10,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0",
"0x85"
]
},
{
"pc": 41,
"op": "DUP1",
"gas": 225,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 42,
"op": "PUSH4",
"gas": 222,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e"
]
},
{
"pc": 47,
"op": "EQ",
"gas": 219,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e",
"0x5c60da1b"
]
},
{
"pc": 48,
"op": "PUSH2",
"gas": 216,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0"
]
},
{
"pc": 51,
"op": "JUMPI",
"gas": 213,
"gasCost": 10,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0",
"0x98"
]
},
{
"pc": 52,
"op": "DUP1",
"gas": 203,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 53,
"op": "PUSH4",
"gas": 200,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e"
]
},
{
"pc": 58,
"op": "EQ",
"gas": 197,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e",
"0x8f283970"
]
},
{
"pc": 59,
"op": "PUSH2",
"gas": 194,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0"
]
},
{
"pc": 62,
"op": "JUMPI",
"gas": 191,
"gasCost": 10,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0",
"0xc9"
]
},
{
"pc": 63,
"op": "DUP1",
"gas": 181,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 64,
"op": "PUSH4",
"gas": 178,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e"
]
},
{
"pc": 69,
"op": "EQ",
"gas": 175,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x8ef1332e",
"0xf851a440"
]
},
{
"pc": 70,
"op": "PUSH2",
"gas": 172,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0"
]
},
{
"pc": 73,
"op": "JUMPI",
"gas": 169,
"gasCost": 10,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x0",
"0xe9"
]
},
{
"pc": 74,
"op": "PUSH2",
"gas": 159,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 77,
"op": "JUMP",
"gas": 156,
"gasCost": 8,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5d"
]
},
{
"pc": 93,
"op": "JUMPDEST",
"gas": 148,
"gasCost": 1,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 94,
"op": "PUSH2",
"gas": 147,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e"
]
},
{
"pc": 97,
"op": "PUSH2",
"gas": 144,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b"
]
},
{
"pc": 100,
"op": "JUMP",
"gas": 141,
"gasCost": 8,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0xfe"
]
},
{
"pc": 254,
"op": "JUMPDEST",
"gas": 133,
"gasCost": 1,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b"
]
},
{
"pc": 255,
"op": "PUSH2",
"gas": 132,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b"
]
},
{
"pc": 258,
"op": "PUSH2",
"gas": 129,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106"
]
},
{
"pc": 261,
"op": "JUMP",
"gas": 126,
"gasCost": 8,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x29b"
]
},
{
"pc": 667,
"op": "JUMPDEST",
"gas": 118,
"gasCost": 1,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106"
]
},
{
"pc": 668,
"op": "PUSH2",
"gas": 117,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106"
]
},
{
"pc": 671,
"op": "PUSH2",
"gas": 114,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3"
]
},
{
"pc": 674,
"op": "JUMP",
"gas": 111,
"gasCost": 8,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3",
"0x368"
]
},
{
"pc": 872,
"op": "JUMPDEST",
"gas": 103,
"gasCost": 1,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3"
]
},
{
"pc": 873,
"op": "PUSH1",
"gas": 102,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3"
]
},
{
"pc": 875,
"op": "PUSH32",
"gas": 99,
"gasCost": 3,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3",
"0x0"
]
},
{
"pc": 908,
"op": "JUMPDEST",
"gas": 96,
"gasCost": 1,
"depth": 1,
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3",
"0x0",
"0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103"
]
},
{
"pc": 909,
"op": "SLOAD",
"gas": 95,
"gasCost": 2100,
"depth": 1,
"error": "out of gas",
"stack": [
"0x8ef1332e",
"0x5b",
"0x106",
"0x2a3",
"0x0",
"0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103"
],
"storage": {
"0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103": "0x0000000000000000000000008eebfef33eb00149852cadb631838ad9bfcce848"
},
"extraData": {
"proofList": [
{
"address": "0x1a258d17bf244c4df02d40343a7626a9d321e105",
"nonce": 1,
"balance": "0x30644e72e131a029b85045b68181585d2833e84879b9705b0e1847ce11600000",
"keccakCodeHash": "0x31f2125c021fb94759cb1993a2f07eae01792311e13f209441ff8969cf1eb835",
"poseidonCodeHash": "0x1cafbbe8f01ed4c292d9a27be523919a274441a076b20c7d713d192dbe6485c2",
"codeSize": 2151,
"storage": {
"key": "0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103",
"value": "0x0000000000000000000000008eebfef33eb00149852cadb631838ad9bfcce848"
}
}
]
}
}
]
}
],
"withdraw_trie_root": "0x0000000000000000000000000000000000000000000000000000000000000000"
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,120 +0,0 @@
package types
import (
"encoding/binary"
"fmt"
"math/big"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/crypto"
)
// BatchHeader contains batch header info to be committed.
// All fields are serialized by Encode in the BatchHeaderV0Codec layout.
type BatchHeader struct {
// Encoded in BatchHeaderV0Codec
version uint8 // codec version byte (offset 0 in the encoding)
batchIndex uint64 // index of this batch
l1MessagePopped uint64 // number of L1 queue messages consumed by this batch
totalL1MessagePopped uint64 // cumulative L1 queue messages consumed after this batch
dataHash common.Hash // keccak256 over the concatenated chunk hashes
parentBatchHash common.Hash // hash of the parent batch header
skippedL1MessageBitmap []byte // 32-byte big-endian words, one bit per queue index; set bit = message skipped
}
// NewBatchHeader creates a new BatchHeader from the given chunks.
//
// version is the codec version byte; batchIndex is the index of this batch;
// totalL1MessagePoppedBefore is the number of L1 queue messages consumed by
// all prior batches; parentBatchHash links this header to its parent.
// It returns an error if a chunk fails to hash or if an L1 message's queue
// index is lower than the next expected index.
func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64, parentBatchHash common.Hash, chunks []*Chunk) (*BatchHeader, error) {
// buffer for storing chunk hashes in order to compute the batch data hash
var dataBytes []byte
// skipped L1 message bitmap, an array of 256-bit bitmaps
var skippedBitmap []*big.Int
// the first queue index that belongs to this batch
baseIndex := totalL1MessagePoppedBefore
// the next queue index that we need to process
nextIndex := totalL1MessagePoppedBefore
for _, chunk := range chunks {
// build data hash: concatenate every chunk's hash; keccak'd after the loop
totalL1MessagePoppedBeforeChunk := nextIndex
chunkBytes, err := chunk.Hash(totalL1MessagePoppedBeforeChunk)
if err != nil {
return nil, err
}
dataBytes = append(dataBytes, chunkBytes...)
// build skip bitmap
for _, block := range chunk.Blocks {
for _, tx := range block.Transactions {
// only type-0x7E (L1 message) transactions affect the bitmap
if tx.Type != 0x7E {
continue
}
// for L1 message txs the nonce carries the queue index (see error below)
currentIndex := tx.Nonce
if currentIndex < nextIndex {
return nil, fmt.Errorf("unexpected batch payload, expected queue index: %d, got: %d", nextIndex, currentIndex)
}
// mark skipped messages: every queue index in [nextIndex, currentIndex)
// was skipped; set its bit, positioned relative to baseIndex
for skippedIndex := nextIndex; skippedIndex < currentIndex; skippedIndex++ {
quo := int((skippedIndex - baseIndex) / 256)
rem := int((skippedIndex - baseIndex) % 256)
// grow the bitmap on demand, one 256-bit word at a time
for len(skippedBitmap) <= quo {
bitmap := big.NewInt(0)
skippedBitmap = append(skippedBitmap, bitmap)
}
skippedBitmap[quo].SetBit(skippedBitmap[quo], rem, 1)
}
// process included message: ensure its word exists but leave its bit 0
quo := int((currentIndex - baseIndex) / 256)
for len(skippedBitmap) <= quo {
bitmap := big.NewInt(0)
skippedBitmap = append(skippedBitmap, bitmap)
}
nextIndex = currentIndex + 1
}
}
}
// compute data hash
dataHash := crypto.Keccak256Hash(dataBytes)
// compute skipped bitmap: each 256-bit word serialized big-endian,
// left-padded with zeros to exactly 32 bytes
bitmapBytes := make([]byte, len(skippedBitmap)*32)
for ii, num := range skippedBitmap {
bytes := num.Bytes()
padding := 32 - len(bytes)
copy(bitmapBytes[32*ii+padding:], bytes)
}
return &BatchHeader{
version: version,
batchIndex: batchIndex,
l1MessagePopped: nextIndex - totalL1MessagePoppedBefore,
totalL1MessagePopped: nextIndex,
dataHash: dataHash,
parentBatchHash: parentBatchHash,
skippedL1MessageBitmap: bitmapBytes,
}, nil
}
// Encode serializes the BatchHeader in the RollupV2 BatchHeaderV0Codec layout:
// version (1 byte) | batchIndex (8) | l1MessagePopped (8) |
// totalL1MessagePopped (8) | dataHash (32) | parentBatchHash (32) |
// skippedL1MessageBitmap (variable).
func (b *BatchHeader) Encode() []byte {
	out := make([]byte, 89+len(b.skippedL1MessageBitmap))
	out[0] = b.version
	binary.BigEndian.PutUint64(out[1:9], b.batchIndex)
	binary.BigEndian.PutUint64(out[9:17], b.l1MessagePopped)
	binary.BigEndian.PutUint64(out[17:25], b.totalL1MessagePopped)
	copy(out[25:57], b.dataHash[:])
	copy(out[57:89], b.parentBatchHash[:])
	copy(out[89:], b.skippedL1MessageBitmap)
	return out
}
// Hash returns the keccak256 hash of the batch header's encoded bytes.
func (b *BatchHeader) Hash() common.Hash {
	encoded := b.Encode()
	return crypto.Keccak256Hash(encoded)
}

View File

@@ -1,234 +0,0 @@
package types
import (
"encoding/json"
"os"
"testing"
"github.com/scroll-tech/go-ethereum/common"
"github.com/stretchr/testify/assert"
)
// TestNewBatchHeader checks NewBatchHeader's skipped-L1-message bitmap
// construction against golden values for a range of block traces: no L1
// messages, a single included message with leading skips, consecutive
// messages (with and without leading skips), sparse messages, and messages
// spanning two 256-bit bitmap words.
func TestNewBatchHeader(t *testing.T) {
	// Without L1 Msg
	templateBlockTrace, err := os.ReadFile("../testdata/blockTrace_02.json")
	assert.NoError(t, err)
	wrappedBlock := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock))
	chunk := &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock,
		},
	}
	// Parent header with zeroed fields; only its hash feeds into the child.
	parentBatchHeader := &BatchHeader{
		version:                1,
		batchIndex:             0,
		l1MessagePopped:        0,
		totalL1MessagePopped:   0,
		dataHash:               common.HexToHash("0x0"),
		parentBatchHash:        common.HexToHash("0x0"),
		skippedL1MessageBitmap: nil,
	}
	batchHeader, err := NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader)
	// No L1 messages popped, so no bitmap words are emitted.
	assert.Equal(t, 0, len(batchHeader.skippedL1MessageBitmap))
	// 1 L1 Msg in 1 bitmap
	templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json")
	assert.NoError(t, err)
	wrappedBlock2 := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace2, wrappedBlock2))
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock2,
		},
	}
	batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader)
	assert.Equal(t, 32, len(batchHeader.skippedL1MessageBitmap))
	expectedBitmap := "00000000000000000000000000000000000000000000000000000000000003ff" // skip first 10
	assert.Equal(t, expectedBitmap, common.Bytes2Hex(batchHeader.skippedL1MessageBitmap))
	// many consecutive L1 Msgs in 1 bitmap, no leading skipped msgs
	templateBlockTrace3, err := os.ReadFile("../testdata/blockTrace_05.json")
	assert.NoError(t, err)
	wrappedBlock3 := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace3, wrappedBlock3))
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock3,
		},
	}
	// totalL1MessagePoppedBefore = 37: every message in the trace is included.
	batchHeader, err = NewBatchHeader(1, 1, 37, parentBatchHeader.Hash(), []*Chunk{chunk})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader)
	assert.Equal(t, uint64(5), batchHeader.l1MessagePopped)
	assert.Equal(t, 32, len(batchHeader.skippedL1MessageBitmap))
	expectedBitmap = "0000000000000000000000000000000000000000000000000000000000000000" // all bits are included, so none are skipped
	assert.Equal(t, expectedBitmap, common.Bytes2Hex(batchHeader.skippedL1MessageBitmap))
	// many consecutive L1 Msgs in 1 bitmap, with leading skipped msgs
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock3,
		},
	}
	batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader)
	assert.Equal(t, uint64(42), batchHeader.l1MessagePopped)
	assert.Equal(t, 32, len(batchHeader.skippedL1MessageBitmap))
	expectedBitmap = "0000000000000000000000000000000000000000000000000000001fffffffff" // skipped the first 37 messages
	assert.Equal(t, expectedBitmap, common.Bytes2Hex(batchHeader.skippedL1MessageBitmap))
	// many sparse L1 Msgs in 1 bitmap
	templateBlockTrace4, err := os.ReadFile("../testdata/blockTrace_06.json")
	assert.NoError(t, err)
	wrappedBlock4 := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace4, wrappedBlock4))
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock4,
		},
	}
	batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader)
	assert.Equal(t, uint64(10), batchHeader.l1MessagePopped)
	assert.Equal(t, 32, len(batchHeader.skippedL1MessageBitmap))
	expectedBitmap = "00000000000000000000000000000000000000000000000000000000000001dd" // 0111011101
	assert.Equal(t, expectedBitmap, common.Bytes2Hex(batchHeader.skippedL1MessageBitmap))
	// many L1 Msgs in each of 2 bitmaps
	templateBlockTrace5, err := os.ReadFile("../testdata/blockTrace_07.json")
	assert.NoError(t, err)
	wrappedBlock5 := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace5, wrappedBlock5))
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock5,
		},
	}
	batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader)
	assert.Equal(t, uint64(257), batchHeader.l1MessagePopped)
	// 257 messages span two 256-bit words, hence a 64-byte bitmap.
	assert.Equal(t, 64, len(batchHeader.skippedL1MessageBitmap))
	expectedBitmap = "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000"
	assert.Equal(t, expectedBitmap, common.Bytes2Hex(batchHeader.skippedL1MessageBitmap))
}
// TestBatchHeaderEncode checks BatchHeader.Encode against golden hex values:
// 89 bytes without L1 messages, 89+32 bytes once one bitmap word is present.
func TestBatchHeaderEncode(t *testing.T) {
	// Without L1 Msg
	templateBlockTrace, err := os.ReadFile("../testdata/blockTrace_02.json")
	assert.NoError(t, err)
	wrappedBlock := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock))
	chunk := &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock,
		},
	}
	// Parent header with zeroed fields; only its hash is consumed below.
	parentBatchHeader := &BatchHeader{
		version:                1,
		batchIndex:             0,
		l1MessagePopped:        0,
		totalL1MessagePopped:   0,
		dataHash:               common.HexToHash("0x0"),
		parentBatchHash:        common.HexToHash("0x0"),
		skippedL1MessageBitmap: nil,
	}
	batchHeader, err := NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader)
	bytes := batchHeader.Encode()
	assert.Equal(t, 89, len(bytes))
	assert.Equal(t, "0100000000000000010000000000000000000000000000000010a64c9bd905f8caf5d668fbda622d6558c5a42cdb4b3895709743d159c22e534136709aabc8a23aa17fbcc833da2f7857d3c2884feec9aae73429c135f94985", common.Bytes2Hex(bytes))
	// With L1 Msg
	templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json")
	assert.NoError(t, err)
	wrappedBlock2 := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace2, wrappedBlock2))
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock2,
		},
	}
	batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader)
	bytes = batchHeader.Encode()
	// 89-byte fixed prefix + one 32-byte bitmap word.
	assert.Equal(t, 121, len(bytes))
	assert.Equal(t, "010000000000000001000000000000000b000000000000000b457a9e90e8e51ba2de2f66c6b589540b88cf594dac7fa7d04b99cdcfecf24e384136709aabc8a23aa17fbcc833da2f7857d3c2884feec9aae73429c135f9498500000000000000000000000000000000000000000000000000000000000003ff", common.Bytes2Hex(bytes))
}
// TestBatchHeaderHash checks BatchHeader.Hash (keccak256 of the encoding)
// against golden values, including a second batch chained to the first
// batch's hash and a batch containing L1 messages.
func TestBatchHeaderHash(t *testing.T) {
	// Without L1 Msg
	templateBlockTrace, err := os.ReadFile("../testdata/blockTrace_02.json")
	assert.NoError(t, err)
	wrappedBlock := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock))
	chunk := &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock,
		},
	}
	// Parent header with zeroed fields; only its hash is consumed below.
	parentBatchHeader := &BatchHeader{
		version:                1,
		batchIndex:             0,
		l1MessagePopped:        0,
		totalL1MessagePopped:   0,
		dataHash:               common.HexToHash("0x0"),
		parentBatchHash:        common.HexToHash("0x0"),
		skippedL1MessageBitmap: nil,
	}
	batchHeader, err := NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader)
	hash := batchHeader.Hash()
	assert.Equal(t, "d69da4357da0073f4093c76e49f077e21bb52f48f57ee3e1fbd9c38a2881af81", common.Bytes2Hex(hash.Bytes()))
	// Second batch (index 2) chained to the first batch's hash.
	templateBlockTrace, err = os.ReadFile("../testdata/blockTrace_03.json")
	assert.NoError(t, err)
	wrappedBlock2 := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock2))
	chunk2 := &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock2,
		},
	}
	batchHeader2, err := NewBatchHeader(1, 2, 0, batchHeader.Hash(), []*Chunk{chunk2})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader2)
	hash2 := batchHeader2.Hash()
	assert.Equal(t, "34de600163aa745d4513113137a5b54960d13f0d3f2849e490c4b875028bf930", common.Bytes2Hex(hash2.Bytes()))
	// With L1 Msg
	templateBlockTrace3, err := os.ReadFile("../testdata/blockTrace_04.json")
	assert.NoError(t, err)
	wrappedBlock3 := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace3, wrappedBlock3))
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock3,
		},
	}
	batchHeader, err = NewBatchHeader(1, 1, 0, parentBatchHeader.Hash(), []*Chunk{chunk})
	assert.NoError(t, err)
	assert.NotNil(t, batchHeader)
	hash = batchHeader.Hash()
	assert.Equal(t, "0ec9547c6645d5f0c1254e121f49e93f54525cfda5bfb2236440fb3470f48902", common.Bytes2Hex(hash.Bytes()))
}

View File

@@ -1,10 +1,6 @@
package types
import (
"encoding/binary"
"errors"
"math"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/core/types"
)
@@ -16,46 +12,3 @@ type WrappedBlock struct {
Transactions []*types.TransactionData `json:"transactions"`
WithdrawTrieRoot common.Hash `json:"withdraw_trie_root,omitempty"`
}
// NumL1Messages returns the number of L1 messages in this block.
// This number is the sum of included and skipped L1 messages.
func (w *WrappedBlock) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 {
	// Scan backwards: the first L1 message (type 0x7E) found is the one with
	// the highest queue index (nonce) in this block.
	for i := len(w.Transactions) - 1; i >= 0; i-- {
		if w.Transactions[i].Type == 0x7E {
			// note: last queue index included before this block is totalL1MessagePoppedBefore - 1
			// TODO: cache results
			return w.Transactions[i].Nonce - totalL1MessagePoppedBefore + 1
		}
	}
	// No L1 messages in this block.
	return 0
}
// Encode encodes the WrappedBlock into RollupV2 BlockContext Encoding.
// Layout (60 bytes): blockNumber (8) | timestamp (8) | baseFee (32, currently
// all zero) | gasLimit (8) | numTransactions (2) | numL1Messages (2).
func (w *WrappedBlock) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) {
	if !w.Header.Number.IsUint64() {
		return nil, errors.New("block number is not uint64")
	}
	if len(w.Transactions) > math.MaxUint16 {
		return nil, errors.New("number of transactions exceeds max uint16")
	}
	numL1Messages := w.NumL1Messages(totalL1MessagePoppedBefore)
	if numL1Messages > math.MaxUint16 {
		return nil, errors.New("number of L1 messages exceeds max uint16")
	}
	blockCtx := make([]byte, 60)
	binary.BigEndian.PutUint64(blockCtx[0:8], w.Header.Number.Uint64())
	binary.BigEndian.PutUint64(blockCtx[8:16], w.Header.Time)
	// TODO: bytes [16:48] hold the 32-byte baseFee. Currently left zero
	// because EIP-1559 is disabled.
	binary.BigEndian.PutUint64(blockCtx[48:56], w.Header.GasLimit)
	binary.BigEndian.PutUint16(blockCtx[56:58], uint16(len(w.Transactions)))
	binary.BigEndian.PutUint16(blockCtx[58:60], uint16(numL1Messages))
	return blockCtx, nil
}

View File

@@ -1,130 +0,0 @@
package types
import (
"encoding/binary"
"encoding/hex"
"errors"
"fmt"
"strings"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/crypto"
)
// Chunk contains blocks to be encoded
type Chunk struct {
	// Blocks is the ordered list of wrapped L2 blocks in this chunk.
	Blocks []*WrappedBlock `json:"blocks"`
}
// NumL1Messages returns the number of L1 messages in this chunk.
// This number is the sum of included and skipped L1 messages.
func (c *Chunk) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 {
	// Track the running queue index: each block consumes messages relative to
	// everything popped before it.
	nextIndex := totalL1MessagePoppedBefore
	for _, block := range c.Blocks {
		nextIndex += block.NumL1Messages(nextIndex)
	}
	// TODO: cache results
	return nextIndex - totalL1MessagePoppedBefore
}
// Encode encodes the Chunk into RollupV2 Chunk Encoding:
// numBlocks (1 byte) | 60-byte block contexts | length-prefixed RLP L2 txs.
// L1 message transactions (type 0x7E) are excluded from the tx payload.
//
// totalL1MessagePoppedBefore is the number of L1 messages popped before this
// chunk; it is threaded through the per-block encodings.
func (c *Chunk) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) {
	numBlocks := len(c.Blocks)
	if numBlocks > 255 {
		return nil, errors.New("number of blocks exceeds 1 byte")
	}
	if numBlocks == 0 {
		return nil, errors.New("number of blocks is 0")
	}
	var chunkBytes []byte
	chunkBytes = append(chunkBytes, byte(numBlocks))
	var l2TxDataBytes []byte
	for _, block := range c.Blocks {
		blockBytes, err := block.Encode(totalL1MessagePoppedBefore)
		if err != nil {
			return nil, fmt.Errorf("failed to encode block: %v", err)
		}
		// Advance the running queue index past this block's L1 messages.
		totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore)
		if len(blockBytes) != 60 {
			// fix: use %d — the previous %v/%x formatted the decimal length in hex.
			return nil, fmt.Errorf("block encoding is not 60 bytes long, got %d", len(blockBytes))
		}
		chunkBytes = append(chunkBytes, blockBytes...)
		// Append rlp-encoded l2Txs
		for _, txData := range block.Transactions {
			if txData.Type == 0x7E {
				continue // L1 messages are not part of the chunk payload
			}
			// fix: propagate decode/marshal errors instead of silently
			// dropping them — a bad trace previously produced a corrupt
			// encoding with no indication of failure.
			data, err := hexutil.Decode(txData.Data)
			if err != nil {
				return nil, fmt.Errorf("failed to decode tx data: %v", err)
			}
			// right now we only support legacy tx
			tx := types.NewTx(&types.LegacyTx{
				Nonce:    txData.Nonce,
				To:       txData.To,
				Value:    txData.Value.ToInt(),
				Gas:      txData.Gas,
				GasPrice: txData.GasPrice.ToInt(),
				Data:     data,
				V:        txData.V.ToInt(),
				R:        txData.R.ToInt(),
				S:        txData.S.ToInt(),
			})
			rlpTxData, err := tx.MarshalBinary()
			if err != nil {
				return nil, fmt.Errorf("failed to rlp-encode tx: %v", err)
			}
			// 4-byte big-endian length prefix, then the RLP payload.
			var txLen [4]byte
			binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData)))
			l2TxDataBytes = append(l2TxDataBytes, txLen[:]...)
			l2TxDataBytes = append(l2TxDataBytes, rlpTxData...)
		}
	}
	chunkBytes = append(chunkBytes, l2TxDataBytes...)
	return chunkBytes, nil
}
// Hash hashes the Chunk into RollupV2 Chunk Hash.
//
// The keccak256 preimage is: the first 58 bytes of every 60-byte block
// context, followed by, for each block, its L1 tx hashes and then its L2 tx
// hashes.
func (c *Chunk) Hash(totalL1MessagePoppedBefore uint64) ([]byte, error) {
	chunkBytes, err := c.Encode(totalL1MessagePoppedBefore)
	if err != nil {
		return nil, err
	}
	numBlocks := int(chunkBytes[0])

	// concatenate block contexts; only the first 58 of each 60 bytes are needed
	var preimage []byte
	for i := 0; i < numBlocks; i++ {
		start := 1 + 60*i
		preimage = append(preimage, chunkBytes[start:start+58]...)
	}

	// concatenate l1 and l2 tx hashes, L1 hashes first within each block
	for _, block := range c.Blocks {
		var l1TxHashes, l2TxHashes []byte
		for _, txData := range block.Transactions {
			hashBytes, err := hex.DecodeString(strings.TrimPrefix(txData.TxHash, "0x"))
			if err != nil {
				return nil, err
			}
			if txData.Type == 0x7E {
				l1TxHashes = append(l1TxHashes, hashBytes...)
			} else {
				l2TxHashes = append(l2TxHashes, hashBytes...)
			}
		}
		preimage = append(preimage, l1TxHashes...)
		preimage = append(preimage, l2TxHashes...)
	}
	return crypto.Keccak256Hash(preimage).Bytes(), nil
}

View File

@@ -1,140 +0,0 @@
package types
import (
"encoding/hex"
"encoding/json"
"os"
"testing"
"github.com/stretchr/testify/assert"
)
// TestChunkEncode checks Chunk.Encode: the 0-block and >255-block error
// paths, plus golden encodings for a chunk with L2-only blocks and chunks
// whose blocks contain L1 message transactions (which are excluded from the
// tx payload).
func TestChunkEncode(t *testing.T) {
	// Test case 1: when the chunk contains no blocks.
	chunk := &Chunk{
		Blocks: []*WrappedBlock{},
	}
	bytes, err := chunk.Encode(0)
	assert.Nil(t, bytes)
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "number of blocks is 0")
	// Test case 2: when the chunk contains more than 255 blocks.
	chunk = &Chunk{
		Blocks: []*WrappedBlock{},
	}
	for i := 0; i < 256; i++ {
		chunk.Blocks = append(chunk.Blocks, &WrappedBlock{})
	}
	bytes, err = chunk.Encode(0)
	assert.Nil(t, bytes)
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "number of blocks exceeds 1 byte")
	// Test case 3: when the chunk contains one block.
	templateBlockTrace, err := os.ReadFile("../testdata/blockTrace_02.json")
	assert.NoError(t, err)
	wrappedBlock := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock))
	assert.Equal(t, uint64(0), wrappedBlock.NumL1Messages(0))
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock,
		},
	}
	bytes, err = chunk.Encode(0)
	hexString := hex.EncodeToString(bytes)
	assert.NoError(t, err)
	assert.Equal(t, 299, len(bytes))
	assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000000000000355418d1e81840002000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1", hexString)
	// Test case 4: when the chunk contains one block with 1 L1MsgTx
	templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json")
	assert.NoError(t, err)
	wrappedBlock2 := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace2, wrappedBlock2))
	assert.Equal(t, uint64(11), wrappedBlock2.NumL1Messages(0)) // 0..=9 skipped, 10 included
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock2,
		},
	}
	bytes, err = chunk.Encode(0)
	hexString = hex.EncodeToString(bytes)
	assert.NoError(t, err)
	// 1 + 60 + (4 + 32): header byte, one block context, one length-prefixed L2 tx.
	assert.Equal(t, 97, len(bytes))
	assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a12000002000b00000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", hexString)
	// Test case 5: when the chunk contains two blocks each with 1 L1MsgTx
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock2,
			wrappedBlock2,
		},
	}
	bytes, err = chunk.Encode(0)
	hexString = hex.EncodeToString(bytes)
	assert.NoError(t, err)
	assert.Equal(t, 193, len(bytes))
	assert.Equal(t, "02000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a12000002000b000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a12000002000000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", hexString)
}
// TestChunkHash checks Chunk.Hash: the 0-block error path and golden hashes
// for one-block, two-block, and mixed L1/L2 transaction chunks.
func TestChunkHash(t *testing.T) {
	// Test case 1: when the chunk contains no blocks
	chunk := &Chunk{
		Blocks: []*WrappedBlock{},
	}
	bytes, err := chunk.Hash(0)
	assert.Nil(t, bytes)
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "number of blocks is 0")
	// Test case 2: successfully hashing a chunk on one block
	templateBlockTrace, err := os.ReadFile("../testdata/blockTrace_02.json")
	assert.NoError(t, err)
	wrappedBlock := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock))
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock,
		},
	}
	bytes, err = chunk.Hash(0)
	hexString := hex.EncodeToString(bytes)
	assert.NoError(t, err)
	assert.Equal(t, "78c839dfc494396c16b40946f32b3f4c3e8c2d4bfd04aefcf235edec474482f8", hexString)
	// Test case 3: successfully hashing a chunk on two blocks
	templateBlockTrace1, err := os.ReadFile("../testdata/blockTrace_03.json")
	assert.NoError(t, err)
	wrappedBlock1 := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace1, wrappedBlock1))
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock,
			wrappedBlock1,
		},
	}
	bytes, err = chunk.Hash(0)
	hexString = hex.EncodeToString(bytes)
	assert.NoError(t, err)
	assert.Equal(t, "aa9e494f72bc6965857856f0fae6916f27b2a6591c714a573b2fab46df03b8ae", hexString)
	// Test case 4: successfully hashing a chunk on two blocks each with L1 and L2 txs
	templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json")
	assert.NoError(t, err)
	wrappedBlock2 := &WrappedBlock{}
	assert.NoError(t, json.Unmarshal(templateBlockTrace2, wrappedBlock2))
	chunk = &Chunk{
		Blocks: []*WrappedBlock{
			wrappedBlock2,
			wrappedBlock2,
		},
	}
	bytes, err = chunk.Hash(0)
	hexString = hex.EncodeToString(bytes)
	assert.NoError(t, err)
	assert.Equal(t, "42967825696a129e7a83f082097aca982747480956dcaa448c9296e795c9a91a", hexString)
}

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v3.3.7"
var tag = "v3.3.1"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
cd contracts
yarn lint-staged
yarn lint-staged && forge build && npx hardhat compile

View File

@@ -2,6 +2,7 @@
Note: For more comprehensive documentation, see [`./docs/`](./docs).
## Directory Structure
```
@@ -23,8 +24,10 @@ remappings.txt - "foundry dependency mappings"
...
```
## Dependencies
### Foundry
First run the command below to get foundryup, the Foundry toolchain installer:
@@ -39,22 +42,24 @@ Then, run `foundryup` in a new terminal session or after reloading your `PATH`.
Other ways to install Foundry can be found [here](https://github.com/foundry-rs/foundry#installation).
### Hardhat
```
yarn install
```
## Build
- Run `git submodule update --init --recursive` to initialise git submodules.
- Run `yarn prettier:solidity` to run linting in fix mode, will auto-format all solidity codes.
- Run `yarn prettier` to run linting in fix mode, will auto-format all typescript codes.
- Run `yarn prepare` to install the precommit linting hook
- Run `forge build` to compile contracts with foundry.
- Run `npx hardhat compile` to compile with hardhat.
- Run `forge test -vvv` to run foundry units tests. It will compile all contracts before running the unit tests.
- Run `npx hardhat test` to run integration tests. It may not compile all contracts before running, it's better to run `npx hardhat compile` first.
+ Run `git submodule update --init --recursive` to initialise git submodules.
+ Run `yarn prettier:solidity` to run linting in fix mode, will auto-format all solidity codes.
+ Run `yarn prettier` to run linting in fix mode, will auto-format all typescript codes.
+ Run `forge build` to compile contracts with foundry.
+ Run `npx hardhat compile` to compile with hardhat.
+ Run `forge test -vvv` to run foundry units tests. It will compile all contracts before running the unit tests.
+ Run `npx hardhat test` to run integration tests. It may not compile all contracts before running, it's better to run `npx hardhat compile` first.
## TODO

3
contracts/admin/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
dist

23
contracts/admin/README.md Normal file
View File

@@ -0,0 +1,23 @@
# admin cli
WIP
Provides commands to generate calldata that can then be pasted into `cast sign` or similar tools. Since no `cast sign` raw-tx command exists, and we want to give users the ability
to choose which method they sign with, we prefer not to sign the tx in this CLI tool.
example (hypothetical) usage:
- npm link
- admin-cli approveHash --network testnet --domain L1 --targetAddress 0x0 --targetCalldata 0x0
{
to: 0x1234,
data: 0x1234,
functionSig: "approveHash(bytes32)"
}
Flow:
- first, approve desired transaction (schedules transaction in Timelock) in SAFE with approveHash()
- second, someone collects all the signers and sends executeTransaction()
- third, someone calls execute() on the Timelock. This actually sends the transaction through the forwarder and executes the call

11
contracts/admin/abis.sh Executable file
View File

@@ -0,0 +1,11 @@
#!/bin/bash
set -ue

# This script is used to generate the typechain artifacts for the contracts.
mkdir -p abis types

# fix: truncate (>) rather than append (>>) — appending meant that re-running
# this script produced files containing multiple concatenated JSON documents,
# which typechain cannot parse. jq reads the file directly (no cat pipe).
jq .abi ../artifacts/src/Safe.sol/Safe.json > abis/safe.json
jq .abi ../artifacts/src/TimelockController.sol/TimelockController.json > abis/timelock.json
jq .abi ../artifacts/src/Forwarder.sol/Forwarder.json > abis/forwarder.json

npx typechain --target=ethers-v6 "abis/*.json"

2
contracts/admin/bin/index.js Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env node
// Thin launcher: delegates to the compiled CLI (built from cli.ts by `tsc`).
require("../dist/cli.js");

57
contracts/admin/cli.ts Normal file
View File

@@ -0,0 +1,57 @@
import yargs from "yargs";
import { ethers } from "ethers";
import { DomainDeployment, getConfig } from "./config";
import { approveHash } from "./tx";

// admin CLI entry point.
//
// `approveHash` prints the raw transaction fragment (to / calldata / function
// signature) that a Safe owner must sign externally (e.g. with `cast sign`)
// to approve scheduling a call in the Timelock.
// eslint-disable-next-line no-unused-expressions
yargs
  .command(
    "approveHash",
    "approve transaction hash in SAFE",
    (yargs) =>
      yargs
        .options({
          network: {
            alias: "n",
            describe: "name of network config to use, eg: {mainnet | goerli | testnet}",
            string: true,
          },
          domain: {
            describe: "L1 or L2",
            string: true,
            coerce: (arg) => arg.toUpperCase(),
          },
          targetAddress: {
            describe: "address of contract to call",
            string: true,
          },
          targetCalldata: {
            describe: "calldata to send to contract",
            string: true,
          },
        })
        .check((argv) => {
          // All four options are required together: network/domain select the
          // deployment config and targetAddress/targetCalldata describe the
          // call being approved.
          // fix: the previous condition `!(A && B) && !(C && D)` only threw
          // when BOTH pairs were missing, so a half-specified invocation
          // passed validation and crashed later on the `!` assertions below.
          if (!(argv.network && argv.domain && argv.targetAddress && argv.targetCalldata)) {
            throw new Error("Must provide network, domain, targetAddress and targetCalldata");
          }
          return true; // If no error was thrown, validation passed and you can return true
        }),
    async (argv) => {
      // todo: validate
      const targetAddress = ethers.getAddress(argv.targetAddress!);
      const targetCalldata = argv.targetCalldata!;
      console.log("using target value from args: ", { targetAddress, targetCalldata });
      const conf = getConfig(argv.network!, argv.domain!);
      const fragment = await approveHash(
        targetAddress,
        ethers.getBytes(targetCalldata),
        conf.ScrollSafeAddress,
        conf.ForwarderAddress,
        conf.ScrollTimelockAddress
      );
      console.log(fragment);
    }
  )
  .help().argv;

49
contracts/admin/config.ts Normal file
View File

@@ -0,0 +1,49 @@
// DomainDeployment holds the admin contract addresses deployed on one domain
// (L1 or L2) of a given network.
export interface DomainDeployment {
  ForwarderAddress: string;
  ScrollSafeAddress: string;
  ScrollTimelockAddress: string;
  CouncilSafeAddress: string;
  CouncilTimelockAddress: string;
}

// Deployment groups the L1 and L2 deployments of a network.
export interface Deployment {
  L1: DomainDeployment;
  L2: DomainDeployment;
}

// Config maps a network name (e.g. "testnet") to its deployment.
export interface Config {
  [key: string]: Deployment;
}

// Known deployments.
// NOTE(review): most addresses are zero placeholders; the non-zero testnet L2
// entries match the local anvil deployment in scripts/encode.sh — confirm
// before use against a real network.
const config: Config = {
  testnet: {
    L1: {
      ForwarderAddress: "0x0000000000000000000000000000000000000000",
      ScrollSafeAddress: "0x0000000000000000000000000000000000000000",
      ScrollTimelockAddress: "0x0000000000000000000000000000000000000000",
      CouncilSafeAddress: "0x0000000000000000000000000000000000000000",
      CouncilTimelockAddress: "0x0000000000000000000000000000000000000000",
    },
    L2: {
      ForwarderAddress: "0xA51c1fc2f0D1a1b8494Ed1FE312d7C3a78Ed91C0",
      ScrollSafeAddress: "0xa513E6E4b8f2a923D98304ec87F64353C4D5C853",
      ScrollTimelockAddress: "0x8A791620dd6260079BF849Dc5567aDC3F2FdC318",
      CouncilSafeAddress: "0x0000000000000000000000000000000000000000",
      CouncilTimelockAddress: "0x0000000000000000000000000000000000000000",
    },
  },
};
// getConfig looks up the DomainDeployment for the given network name and
// domain ("L1" | "L2"). Throws on an unknown network or domain.
export const getConfig = (network: string, domain: string): DomainDeployment => {
  const deployment = config[network];
  if (deployment === undefined) {
    throw new Error(`Invalid network: ${network}`);
  }
  if (!(domain in deployment)) {
    throw new Error(`Invalid domain: ${domain}`);
  }
  return deployment[domain as keyof Deployment];
};

View File

@@ -0,0 +1,19 @@
{
"name": "admin-cli",
"bin": {
"admin-cli": "./bin/index.js"
},
"main": "bin/index.js",
"scripts": {
"build": "tsc",
"prepublishOnly": "npm run build"
},
"dependencies": {
"ethers": "^6.6.1",
"yargs": "^17.7.2"
},
"devDependencies": {
"@typechain/ethers-v6": "^0.4.0",
"@types/yargs": "^17.0.24"
}
}

View File

@@ -0,0 +1,10 @@
{
"compilerOptions": {
"target": "es2018",
"module": "commonjs",
"strict": true,
"esModuleInterop": true,
"outDir": "dist",
"declaration": true
}
}

113
contracts/admin/tx.ts Normal file
View File

@@ -0,0 +1,113 @@
import { ethers } from "ethers";
import {
Safe__factory,
Safe,
Forwarder__factory,
Forwarder,
Timelock__factory,
Timelock,
} from "./types/ethers-contracts";
// RawTxFragment describes a transaction for an external signer: the contract
// to call, the calldata/argument payload, and the human-readable function
// signature being invoked.
export interface RawTxFragment {
  to: string;
  callData: string;
  functionSig: string;
}
// Executes an already-approved Safe transaction.
//
// Builds the packed signatures blob from the approving owners (one entry per
// sender via encodeAddress) and calls Safe.execTransaction with zero value,
// operation 0, and no gas/refund parameters.
//
// NOTE(review): the target passed to execTransaction is the zero address and
// the initial `signatures` value is a bare 20-byte hex string rather than
// "0x" — both look like WIP placeholders; confirm before use.
async function execTransaction(wallet: ethers.Wallet, safeContract: Safe, calldata: string, senders: string[]) {
  // ethers.AbiCoder.encode(
  // Safe__factory.abi
  let signatures = "0x0000000000000000000000000000000000000000";
  for (let i = 0; i < senders.length; i++) {
    signatures += encodeAddress(senders[i]);
  }
  await safeContract
    .connect(wallet)
    .execTransaction(
      "0x0000000000000000000000000000000000000000",
      0,
      calldata,
      0,
      0,
      0,
      0,
      ethers.ZeroAddress,
      ethers.ZeroAddress,
      signatures,
      { gasLimit: 1000000 }
    );
}
// approveHash builds the RawTxFragment a Safe owner must sign to approve
// scheduling `targetCalldata`@`targetAddress` through the Forwarder/Timelock
// pipeline. It encodes, in order:
//   1. forwarder.forward(targetAddress, targetCalldata)
//   2. timelock.schedule(forwarder, 0, forwarderCalldata, 0, 0, 0)
//   3. the Safe transaction hash for calling schedule() on the timelock
// and returns that hash as the argument for approveHash(bytes32).
//
// NOTE(review): the RPC URL is hard-coded to http://localhost:1234 (the local
// anvil instance started by scripts/deploy.sh) — should come from config.
// NOTE(review): `callData` in the returned fragment carries the Safe tx HASH
// (the approveHash argument), not encoded calldata — confirm consumers expect
// this.
export async function approveHash(
  targetAddress: ethers.AddressLike,
  targetCalldata: ethers.BytesLike,
  safeAddress: ethers.AddressLike,
  forwarderAddress: ethers.AddressLike,
  timelockAddress: ethers.AddressLike
): Promise<RawTxFragment> {
  // either implement getTransactionHash in JS or make RPC call to get hash
  const provider = new ethers.JsonRpcProvider("http://localhost:1234");
  const safeContract = Safe__factory.connect(safeAddress.toString(), provider);
  const forwarderContract = Forwarder__factory.connect(forwarderAddress.toString());
  const timelockContract = Timelock__factory.connect(timelockAddress.toString());
  // const targetCalldata = targetContract.interface.encodeFunctionData("err");
  const forwarderCalldata = forwarderContract.interface.encodeFunctionData("forward", [
    targetAddress.toString(),
    targetCalldata,
  ]);
  const timelockScheduleCalldata = timelockContract.interface.encodeFunctionData("schedule", [
    forwarderAddress.toString(),
    0,
    forwarderCalldata,
    ethers.ZeroHash,
    ethers.ZeroHash,
    0,
  ]);
  const txHash = await safeContract.getTransactionHash(
    timelockAddress.toString(),
    0,
    timelockScheduleCalldata,
    0,
    0,
    0,
    0,
    ethers.ZeroAddress,
    ethers.ZeroAddress,
    0
  );
  return {
    to: safeAddress.toString(),
    callData: txHash,
    functionSig: "approveHash(bytes32)",
  };
}
// await safeContract.checkNSignatures(scheduleSafeTxHash, ethers.arrayify("0x00"), sigSchedule, 1);
// await timelockContract
// .connect(wallet)
// .execute(L2_FORWARDER_ADDR, 0, forwarderCalldata, ethers.HashZero, ethers.HashZero, {
// gasLimit: 1000000,
// });
// safe takes address as part of the signature
//
// Encodes an owner address as a Safe "approved hash" signature entry:
// r = owner address left-padded to 32 bytes, s = 0 (32 bytes), v = 1.
// Returns the 65-byte entry as hex WITHOUT the 0x prefix so callers can
// append it to an accumulated signatures hex string.
function encodeAddress(address: string) {
  const r = ethers.zeroPadValue(address, 32);
  const s = ethers.zeroPadValue("0x00", 32);
  const v = "0x01";
  // fix: the previous `ethers.toBeHex(ethers.concat([r, s, v])).slice(-2)`
  // reinterpreted the concatenated bytes as a number (dropping leading
  // zeros) and then kept only the LAST two hex chars ("01"), discarding the
  // r and s components entirely. `ethers.concat` already returns a
  // 0x-prefixed hex string; slice(2) strips just the prefix.
  return ethers.concat([r, s, v]).slice(2);
}
// add 4 to the v byte at the end of the signature
//
// Safe's signature verification distinguishes eth_sign signatures by
// v ∈ {31, 32}; this shifts a standard {27, 28} recovery byte accordingly.
function editSig(sig: string) {
  const v = parseInt(sig.slice(-2), 16);
  const newV = v + 4;
  // fix: zero-pad to two hex digits — without padding, any result below
  // 0x10 produced a single digit, corrupting the signature's length.
  return sig.slice(0, -2) + newV.toString(16).padStart(2, "0");
}
console.log(encodeAddress("0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"));
module.exports = {
approveHash,
};

View File

@@ -63,6 +63,28 @@ Initialize the storage of L1ScrollMessenger.
| _rollup | address | The address of ScrollChain contract. |
| _messageQueue | address | The address of L1MessageQueue contract. |
### isL1MessageRelayed
```solidity
function isL1MessageRelayed(bytes32) external view returns (bool)
```
Mapping from relay id to relay status.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _0 | bytes32 | undefined |
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | bool | undefined |
### isL1MessageSent
```solidity

View File

@@ -3,7 +3,7 @@ src = 'src' # the source directory
test = 'src/test' # the test directory
script = 'scripts' # the script directory
out = 'artifacts/src' # the output directory (for artifacts)
libs = [] # a list of library directories
libs = ["lib"] # the library directory
remappings = [] # a list of remappings
libraries = [] # a list of deployed libraries to link against
cache = true # whether to cache builds or not

View File

@@ -2,16 +2,7 @@
/* eslint-disable node/no-missing-import */
import { expect } from "chai";
import { BigNumber, constants } from "ethers";
import {
concat,
getAddress,
hexlify,
keccak256,
randomBytes,
RLP,
stripZeros,
TransactionTypes,
} from "ethers/lib/utils";
import { concat, getAddress, hexlify, keccak256, randomBytes, RLP } from "ethers/lib/utils";
import { ethers } from "hardhat";
import { L1MessageQueue, L2GasPriceOracle } from "../typechain";
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
@@ -103,8 +94,8 @@ describe("L1MessageQueue", async () => {
context("#computeTransactionHash", async () => {
it("should succeed", async () => {
const sender = "0xb2a70fab1a45b1b9be443b6567849a1702bc1232";
const target = "0xcb18150e4efefb6786130e289a5f61a82a5b86d7";
const sender = hexlify(randomBytes(20));
const target = hexlify(randomBytes(20));
const transactionType = "0x7E";
for (const nonce of [
@@ -132,30 +123,19 @@ describe("L1MessageQueue", async () => {
constants.MaxUint256,
]) {
for (const dataLen of [0, 1, 2, 3, 4, 55, 56, 100]) {
const tests = [randomBytes(dataLen)];
if (dataLen === 1) {
for (const byte of [0, 1, 127, 128]) {
tests.push(Uint8Array.from([byte]));
}
}
for (const data of tests) {
const transactionPayload = RLP.encode([
stripZeros(nonce.toHexString()),
stripZeros(gasLimit.toHexString()),
target,
stripZeros(value.toHexString()),
data,
sender,
]);
const payload = concat([transactionType, transactionPayload]);
const expectedHash = keccak256(payload);
const computedHash = await queue.computeTransactionHash(sender, nonce, value, target, gasLimit, data);
if (computedHash !== expectedHash) {
console.log(hexlify(transactionPayload));
console.log(nonce, gasLimit, target, value, data, sender);
}
expect(expectedHash).to.eq(computedHash);
}
const data = randomBytes(dataLen);
const transactionPayload = RLP.encode([
nonce.toHexString(),
gasLimit.toHexString(),
target,
value.toHexString(),
data,
sender,
]);
const payload = concat([transactionType, transactionPayload]);
const expectedHash = keccak256(payload);
const computedHash = await queue.computeTransactionHash(sender, nonce, value, target, gasLimit, data);
expect(expectedHash).to.eq(computedHash);
}
}
}

View File

@@ -11,8 +11,7 @@
"lint:sol": "./node_modules/.bin/prettier --write 'src/**/*.sol'",
"lint:ts": "./node_modules/.bin/prettier --write 'integration-test/**/*.ts' 'scripts/**/*.ts' *.ts",
"lint": "yarn lint:ts && yarn lint:sol",
"coverage": "hardhat coverage",
"prepare": "cd .. && husky install contracts/.husky"
"coverage": "hardhat coverage"
},
"devDependencies": {
"@nomiclabs/hardhat-ethers": "^2.0.0",

26
contracts/scripts/deploy.sh Executable file
View File

@@ -0,0 +1,26 @@
#!/bin/sh
# Deploy a local instance of the L2 admin contracts to an anvil node and run
# the encode.ts admin-transaction flow against it.
set -uex

PORT=1234

# Kill any process already listening on the port. The `|| true` guard matters:
# under `set -e`, a bare `lsof -t` with no listener exits non-zero and would
# abort the whole script before anvil is even started.
PID=$(lsof -t -i:$PORT || true)
if [ -n "$PID" ]; then
    echo "$PID"
    kill $PID
fi

# Well-known anvil/hardhat dev account #0 key — not a secret.
export L2_DEPLOYER_PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80

# deploys a local instance of the contracts
anvil --port $PORT &

# Wait until anvil is accepting connections before broadcasting.
while ! lsof -i :$PORT
do
    echo "...waiting for anvil"
    sleep 1
done
echo "started anvil"

forge script ./foundry/DeployL2AdminContracts.s.sol:DeployL2AdminContracts --rpc-url http://localhost:$PORT --legacy --broadcast -vvvv

npx ts-node ./encode.ts

echo "deployment success"

74
contracts/scripts/encode.sh Executable file
View File

@@ -0,0 +1,74 @@
#!/bin/sh
# Drive an admin call through the Safe -> TimelockController -> Forwarder chain
# on a local node: first a Safe tx that schedules the call on the timelock,
# then (second section, currently disabled) a Safe tx that executes it.
# NOTE(review): author marked this flow broken due to the signature V recovery
# bit; the working flow lives in contracts/scripts/encode.ts.
set -uex

# Addresses as logged by DeployL2AdminContracts on a fresh anvil node.
L2_COUNCIL_SAFE_ADDR=0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512
L2_COUNCIL_TIMELOCK_ADDR=0xCf7Ed3AccA5a467e9e704C703E8D87F634fB0Fc9
L2_SCROLL_SAFE_ADDR=0xa513E6E4b8f2a923D98304ec87F64353C4D5C853
L2_SCROLL_TIMELOCK_ADDR=0x8A791620dd6260079BF849Dc5567aDC3F2FdC318
L2_FORWARDER_ADDR=0xA51c1fc2f0D1a1b8494Ed1FE312d7C3a78Ed91C0
L2_TARGET_ADDR=0x0DCd1Bf9A1b36cE34237eEaFef220932846BCD82

# 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 (anvil dev account #0)
L2_DEPLOYER_PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80
ZERO_BYTES=0x0000000000000000000000000000000000000000

# Build calldata chain: target.err() wrapped in forwarder.forward(), wrapped in
# timelock.schedule(), then hash that as a Safe transaction and sign it.
ADMIN_CALLDATA=$(cast calldata "err()")
FORWARDER_CALLDATA=$(cast calldata "forward(address,bytes)" $L2_FORWARDER_ADDR $ADMIN_CALLDATA)
TIMELOCK_SCHEDULE_CALLDATA=$(cast calldata "schedule(address,uint256,bytes,bytes32,bytes32,uint256)" $L2_FORWARDER_ADDR 0 $FORWARDER_CALLDATA 0x0 0x0 0x0)

SAFE_TX_HASH=$(cast call -r http://localhost:1234 $L2_SCROLL_SAFE_ADDR "getTransactionHash(address,uint256,bytes,uint8,uint256,uint256,uint256,address,address,uint256)" \
    $L2_SCROLL_TIMELOCK_ADDR 0 $TIMELOCK_SCHEDULE_CALLDATA 0 0 0 0 $ZERO_BYTES $ZERO_BYTES 0)
SAFE_SIG=$(cast wallet sign --private-key $L2_DEPLOYER_PRIVATE_KEY $SAFE_TX_HASH | awk '{print $2}')

# echo $SAFE_SIG
# echo $SAFE_TX_HASH

# send safe tx to schedule the call
cast send -c 31337 --legacy --private-key $L2_DEPLOYER_PRIVATE_KEY -r http://localhost:1234 --gas-limit 1000000 $L2_SCROLL_SAFE_ADDR "execTransaction(address,uint256,bytes,uint8,uint256,uint256,uint256,address,address,bytes)" \
    $L2_SCROLL_TIMELOCK_ADDR 0 $TIMELOCK_SCHEDULE_CALLDATA 0 0 0 0 $ZERO_BYTES $ZERO_BYTES $SAFE_SIG

# ABI reference for the Safe calls above:
# function encodeTransactionData(
#     address to,
#     uint256 value,
#     bytes calldata data,
#     Enum.Operation operation,
#     uint256 safeTxGas,
#     uint256 baseGas,
#     uint256 gasPrice,
#     address gasToken,
#     address refundReceiver,
#     uint256 _nonce

# function execTransaction(
#     address to,
#     uint256 value,
#     bytes calldata data,
#     Enum.Operation operation,
#     uint256 safeTxGas,
#     uint256 baseGas,
#     uint256 gasPrice,
#     address gasToken,
#     address payable refundReceiver,
#     bytes memory signatures

exit 0

# /////////////// 2nd tx ///////////////
# Hash and sign the *execute* call. (Previously this hashed the schedule
# calldata and signed the stale schedule hash, so the second tx could never
# verify.)
TIMELOCK_EXECUTE_CALLDATA=$(cast calldata "execute(address,uint256,bytes,bytes32,bytes32)" $L2_FORWARDER_ADDR 0 $FORWARDER_CALLDATA 0x0 0x0)
# NOTE(review): the Safe nonce is still passed as 0 here; after the first
# execTransaction succeeds the Safe's nonce will be 1 — confirm before enabling.
SAFE_TX_HASH_=$(cast call -r http://localhost:1234 $L2_SCROLL_SAFE_ADDR "getTransactionHash(address,uint256,bytes,uint8,uint256,uint256,uint256,address,address,uint256)" \
    $L2_SCROLL_TIMELOCK_ADDR 0 $TIMELOCK_EXECUTE_CALLDATA 0 0 0 0 $ZERO_BYTES $ZERO_BYTES 0)
SAFE_SIG=$(cast wallet sign --private-key $L2_DEPLOYER_PRIVATE_KEY $SAFE_TX_HASH_ | awk '{print $2}')

# send safe tx to execute the call
cast send -c 31337 --legacy --private-key $L2_DEPLOYER_PRIVATE_KEY -r http://localhost:1234 --gas-limit 1000000 $L2_SCROLL_SAFE_ADDR "execTransaction(address,uint256,bytes,uint8,uint256,uint256,uint256,address,address,bytes)" \
    $L2_SCROLL_TIMELOCK_ADDR 0 $TIMELOCK_EXECUTE_CALLDATA 0 0 0 0 $ZERO_BYTES $ZERO_BYTES $SAFE_SIG

echo "DONE"

102
contracts/scripts/encode.ts Normal file
View File

@@ -0,0 +1,102 @@
import { ethers } from "ethers";
import { Safeabi__factory, Forwarder__factory, Target__factory, Timelock__factory } from "../safeAbi";
const L2_SCROLL_SAFE_ADDR = "0xa513E6E4b8f2a923D98304ec87F64353C4D5C853";
const L2_SCROLL_TIMELOCK_ADDR = "0x8A791620dd6260079BF849Dc5567aDC3F2FdC318";
const L2_FORWARDER_ADDR = "0xA51c1fc2f0D1a1b8494Ed1FE312d7C3a78Ed91C0";
const L2_TARGET_ADDR = "0x0DCd1Bf9A1b36cE34237eEaFef220932846BCD82";
const L2_DEPLOYER_PRIVATE_KEY = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
/*
TODO:
* read from env
* use approve hash flow
* read nonce from safe
* split script into schedule and execute
* add gas limit
* document how to use
* how to get addresses from deployment?
* get abis in a reasonable way
*/
/*
to get safe abi
* forge build
* cat artifacts/src/Safe.sol/Safe.json| jq .abi >> safeabi.json
* mkdir safeAbi
* npx typechain --target=ethers-v5 safeabi.json --out-dir safeAbi
repeat for forwarder, timelock, target
*/
// Drives one full admin action on a local anvil node: a 1/1 Safe schedules a
// call on its TimelockController, then the timelock executes it through the
// Forwarder into the mock target's err() function.
async function main() {
  // Local anvil node started by scripts/deploy.sh.
  const provider = new ethers.providers.JsonRpcProvider("http://localhost:1234");
  const wallet = new ethers.Wallet(L2_DEPLOYER_PRIVATE_KEY, provider);
  const safeContract = Safeabi__factory.connect(L2_SCROLL_SAFE_ADDR, provider);
  const forwarderContract = Forwarder__factory.connect(L2_FORWARDER_ADDR, provider);
  const timelockContract = Timelock__factory.connect(L2_SCROLL_TIMELOCK_ADDR, provider);
  const targetContract = Target__factory.connect(L2_TARGET_ADDR, provider);

  // Build the nested calldata: target.err() -> forwarder.forward(target, ...)
  // -> timelock.schedule(forwarder, ...). Predecessor/salt are zero; delay 0.
  const targetCalldata = targetContract.interface.encodeFunctionData("err");
  const forwarderCalldata = forwarderContract.interface.encodeFunctionData("forward", [L2_TARGET_ADDR, targetCalldata]);
  const timelockScheduleCalldata = timelockContract.interface.encodeFunctionData("schedule", [
    L2_FORWARDER_ADDR,
    0,
    forwarderCalldata,
    ethers.constants.HashZero,
    ethers.constants.HashZero,
    0,
  ]);

  // Safe transaction hash for the schedule call: operation CALL (0), no gas
  // refund params, nonce 0 (fresh Safe). NOTE(review): nonce is hard-coded —
  // reading it from the Safe is on the TODO list above.
  const scheduleSafeTxHash = await safeContract.getTransactionHash(
    L2_SCROLL_TIMELOCK_ADDR,
    0,
    timelockScheduleCalldata,
    0,
    0,
    0,
    0,
    ethers.constants.AddressZero,
    ethers.constants.AddressZero,
    0
  );
  // signMessage applies the EIP-191 "\x19Ethereum Signed Message" prefix, so
  // the v byte must be bumped by 4 to mark it as an eth_sign-style signature
  // for the Safe's signature checker (see editSig below).
  const sigRawSchedule = await wallet.signMessage(ethers.utils.arrayify(scheduleSafeTxHash));
  const sigSchedule = editSig(sigRawSchedule);

  // Dry-run the signature check before spending gas on execTransaction.
  await safeContract.checkNSignatures(scheduleSafeTxHash, ethers.utils.arrayify("0x00"), sigSchedule, 1);

  await safeContract
    .connect(wallet)
    .execTransaction(
      L2_SCROLL_TIMELOCK_ADDR,
      0,
      timelockScheduleCalldata,
      0,
      0,
      0,
      0,
      ethers.constants.AddressZero,
      ethers.constants.AddressZero,
      sigSchedule,
      { gasLimit: 1000000 }
    );
  console.log("scheduled");

  // Timelock delay is 0 on this deployment, so the operation is immediately
  // ready; anyone may call execute (open executor role).
  await timelockContract
    .connect(wallet)
    .execute(L2_FORWARDER_ADDR, 0, forwarderCalldata, ethers.constants.HashZero, ethers.constants.HashZero, {
      gasLimit: 1000000,
    });
}
// Convert an EIP-191 signature into a Safe "eth_sign" signature by adding 4 to
// the trailing v byte (Safe encodes eth_sign signatures as v in {31, 32}).
//
// @param sig - 0x-prefixed hex signature whose last byte is v.
// @returns The signature with v increased by 4, same length as the input.
function editSig(sig: string) {
  const v = parseInt(sig.slice(-2), 16);
  const newV = v + 4;
  // padStart keeps the byte two hex digits wide; the unpadded toString(16)
  // would shorten the signature for v + 4 < 16 (e.g. compact v = 0 or 1).
  const newSig = sig.slice(0, -2) + newV.toString(16).padStart(2, "0");
  return newSig;
}
main();

View File

@@ -1,27 +0,0 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.10;

import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";

import {Fallback} from "../../src/misc/Fallback.sol";

/// @notice Forge script that deploys NUM_CONTRACTS copies of the Fallback
/// contract, broadcasting with the DEPLOYER_PRIVATE_KEY account, and logs each
/// deployed address as `FALLBACK=<addr>`.
contract DeployFallbackContracts is Script {
    // Both values are read from the environment; the script reverts if unset.
    uint256 DEPLOYER_PRIVATE_KEY = vm.envUint("DEPLOYER_PRIVATE_KEY");

    uint256 NUM_CONTRACTS = vm.envUint("NUM_CONTRACTS");

    /// @notice Entry point invoked by `forge script`.
    function run() external {
        vm.startBroadcast(DEPLOYER_PRIVATE_KEY);

        for (uint256 ii = 0; ii < NUM_CONTRACTS; ++ii) {
            Fallback fallbackContract = new Fallback();
            logAddress("FALLBACK", address(fallbackContract));
        }

        vm.stopBroadcast();
    }

    /// @dev Emit a `NAME=address` line for consumption by deployment tooling.
    function logAddress(string memory name, address addr) internal view {
        console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
    }
}

View File

@@ -0,0 +1,76 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.10;

import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {Safe} from "safe-contracts/Safe.sol";
import {SafeProxy} from "safe-contracts/proxies/SafeProxy.sol";
import {TimelockController} from "@openzeppelin/contracts/governance/TimelockController.sol";
import {Forwarder} from "../../src/misc/Forwarder.sol";

/// @dev Minimal interface to call `setup` through the Safe proxy
/// (mirrors the one used in DeployL2AdminContracts).
interface ISafe {
    function setup(
        address[] calldata _owners,
        uint256 _threshold,
        address to,
        bytes calldata data,
        address fallbackHandler,
        address paymentToken,
        uint256 payment,
        address payable paymentReceiver
    ) external;
}

/// @notice Deploys the L1 admin stack: a council Safe + zero-delay timelock,
/// a scroll Safe + delayed timelock, and a Forwarder wired to both Safes.
/// Addresses are logged as `NAME=<addr>` lines for downstream tooling.
contract DeployL1AdminContracts is Script {
    uint256 L1_DEPLOYER_PRIVATE_KEY = vm.envUint("L1_DEPLOYER_PRIVATE_KEY");

    /// @notice Entry point invoked by `forge script`.
    function run() external {
        vm.startBroadcast(L1_DEPLOYER_PRIVATE_KEY);

        address council_safe = deploySafe();
        // deploy timelock with no delay just to have flow between council and scroll admin
        address council_timelock = deployTimelockController(council_safe, 0);
        logAddress("L1_COUNCIL_SAFE_ADDR", address(council_safe));
        logAddress("L1_COUNCIL_TIMELOCK_ADDR", address(council_timelock));

        address scroll_safe = deploySafe();
        // TODO: get timelock delay from env. for now just use 2 days
        address scroll_timelock = deployTimelockController(scroll_safe, 2 days);
        logAddress("L1_SCROLL_SAFE_ADDR", address(scroll_safe));
        logAddress("L1_SCROLL_TIMELOCK_ADDR", address(scroll_timelock));

        address forwarder = deployForwarder(address(council_safe), address(scroll_safe));
        logAddress("L1_FORWARDER_ADDR", address(forwarder));

        vm.stopBroadcast();
    }

    /// @dev Deploy the Forwarder with its admin and super-admin roles.
    function deployForwarder(address admin, address superAdmin) internal returns (address) {
        Forwarder forwarder = new Forwarder(admin, superAdmin);
        return address(forwarder);
    }

    /// @dev Deploy a 1/1 Safe owned by the deployer and return the proxy address.
    function deploySafe() internal returns (address) {
        address owner = vm.addr(L1_DEPLOYER_PRIVATE_KEY);
        // TODO: get safe signers from env
        // Deploy the singleton and configure it through a proxy, as in
        // DeployL2AdminContracts: the Safe singleton's constructor locks its
        // own threshold, so calling setup() directly on it reverts.
        Safe safe = new Safe();
        SafeProxy proxy = new SafeProxy(address(safe));
        address[] memory owners = new address[](1);
        owners[0] = owner;
        // deployer 1/1. no gas refunds for now
        ISafe(address(proxy)).setup(
            owners,
            1,
            address(0),
            new bytes(0),
            address(0),
            address(0),
            0,
            payable(address(0))
        );
        return address(proxy);
    }

    /// @dev Deploy a TimelockController proposed-to by `safe`, then hand admin
    /// rights to the safe and revoke the deployer's.
    function deployTimelockController(address safe, uint256 delay) internal returns (address) {
        address deployer = vm.addr(L1_DEPLOYER_PRIVATE_KEY);
        address[] memory proposers = new address[](1);
        proposers[0] = safe;
        // add SAFE as the only proposer; only the deployer may execute
        // NOTE(review): the L2 script grants the open-role address(0) executor
        // instead — confirm which policy L1 should use.
        address[] memory executors = new address[](1);
        executors[0] = deployer;
        TimelockController timelock = new TimelockController(delay, proposers, executors);
        bytes32 TIMELOCK_ADMIN_ROLE = keccak256("TIMELOCK_ADMIN_ROLE");
        // make safe admin of timelock, then revoke deployer's rights
        timelock.grantRole(TIMELOCK_ADMIN_ROLE, address(safe));
        timelock.revokeRole(TIMELOCK_ADMIN_ROLE, deployer);
        return address(timelock);
    }

    /// @dev Emit a `NAME=address` line for consumption by deployment tooling.
    function logAddress(string memory name, address addr) internal view {
        console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
    }
}

View File

@@ -4,8 +4,8 @@ pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol";
import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol";
import {TimelockController} from "@openzeppelin/contracts/governance/TimelockController.sol";
import {L1CustomERC20Gateway} from "../../src/L1/gateways/L1CustomERC20Gateway.sol";
import {L1ERC1155Gateway} from "../../src/L1/gateways/L1ERC1155Gateway.sol";
@@ -22,6 +22,7 @@ import {L2GasPriceOracle} from "../../src/L1/rollup/L2GasPriceOracle.sol";
import {ScrollChain} from "../../src/L1/rollup/ScrollChain.sol";
import {Whitelist} from "../../src/L2/predeploys/Whitelist.sol";
contract DeployL1BridgeContracts is Script {
uint256 L1_DEPLOYER_PRIVATE_KEY = vm.envUint("L1_DEPLOYER_PRIVATE_KEY");
@@ -30,14 +31,14 @@ contract DeployL1BridgeContracts is Script {
address L1_WETH_ADDR = vm.envAddress("L1_WETH_ADDR");
address L2_WETH_ADDR = vm.envAddress("L2_WETH_ADDR");
ProxyAdmin proxyAdmin;
// scroll admin (timelocked) or security council
address FORWARDER = vm.envAddress("L1_FORWARDER");
function run() external {
vm.startBroadcast(L1_DEPLOYER_PRIVATE_KEY);
// note: the RollupVerifier library is deployed implicitly
deployProxyAdmin();
deployL1Whitelist();
deployL1MessageQueue();
deployL2GasPriceOracle();
@@ -55,12 +56,6 @@ contract DeployL1BridgeContracts is Script {
vm.stopBroadcast();
}
function deployProxyAdmin() internal {
proxyAdmin = new ProxyAdmin();
logAddress("L1_PROXY_ADMIN_ADDR", address(proxyAdmin));
}
function deployL1Whitelist() internal {
address owner = vm.addr(L1_DEPLOYER_PRIVATE_KEY);
Whitelist whitelist = new Whitelist(owner);
@@ -72,7 +67,7 @@ contract DeployL1BridgeContracts is Script {
ScrollChain impl = new ScrollChain(CHAIN_ID_L2);
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -84,7 +79,7 @@ contract DeployL1BridgeContracts is Script {
L1MessageQueue impl = new L1MessageQueue();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
logAddress("L1_MESSAGE_QUEUE_IMPLEMENTATION_ADDR", address(impl));
@@ -95,7 +90,7 @@ contract DeployL1BridgeContracts is Script {
L2GasPriceOracle impl = new L2GasPriceOracle();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
logAddress("L2_GAS_PRICE_ORACLE_IMPLEMENTATION_ADDR", address(impl));
@@ -106,7 +101,7 @@ contract DeployL1BridgeContracts is Script {
L1StandardERC20Gateway impl = new L1StandardERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -118,7 +113,7 @@ contract DeployL1BridgeContracts is Script {
L1ETHGateway impl = new L1ETHGateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -130,7 +125,7 @@ contract DeployL1BridgeContracts is Script {
L1WETHGateway impl = new L1WETHGateway(L1_WETH_ADDR, L2_WETH_ADDR);
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -142,7 +137,7 @@ contract DeployL1BridgeContracts is Script {
L1GatewayRouter impl = new L1GatewayRouter();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -154,7 +149,7 @@ contract DeployL1BridgeContracts is Script {
L1ScrollMessenger impl = new L1ScrollMessenger();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -166,7 +161,7 @@ contract DeployL1BridgeContracts is Script {
EnforcedTxGateway impl = new EnforcedTxGateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -178,7 +173,7 @@ contract DeployL1BridgeContracts is Script {
L1CustomERC20Gateway impl = new L1CustomERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -190,7 +185,7 @@ contract DeployL1BridgeContracts is Script {
L1ERC721Gateway impl = new L1ERC721Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -202,7 +197,7 @@ contract DeployL1BridgeContracts is Script {
L1ERC1155Gateway impl = new L1ERC1155Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);

View File

@@ -0,0 +1,114 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.10;

import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {Safe} from "safe-contracts/Safe.sol";
import {SafeProxy} from "safe-contracts/proxies/SafeProxy.sol";
import {TimelockController} from "@openzeppelin/contracts/governance/TimelockController.sol";
import {Forwarder} from "../../src/misc/Forwarder.sol";
import {MockTarget} from "../../src/mocks/MockTarget.sol";

/// @dev Minimal interface to call `setup` through the Safe proxy.
interface ISafe {
    function setup(
        address[] calldata _owners,
        uint256 _threshold,
        address to,
        bytes calldata data,
        address fallbackHandler,
        address paymentToken,
        uint256 payment,
        address payable paymentReceiver
    ) external;
}

/// @notice Deploys the L2 admin stack: a council Safe + timelock, a scroll
/// Safe + timelock, a Forwarder wired to both timelocks, and a MockTarget for
/// exercising the flow. Addresses are logged as `NAME=<addr>` lines.
contract DeployL2AdminContracts is Script {
    uint256 L2_DEPLOYER_PRIVATE_KEY = vm.envUint("L2_DEPLOYER_PRIVATE_KEY");

    /// @notice Entry point invoked by `forge script`.
    function run() external {
        vm.startBroadcast(L2_DEPLOYER_PRIVATE_KEY);

        address council_safe = deploySafe();
        // deploy timelock with no delay, just to keep council and scroll admin flows be parallel
        address council_timelock = deployTimelockController(council_safe, 0);
        logAddress("L2_COUNCIL_SAFE_ADDR", address(council_safe));
        logAddress("L2_COUNCIL_TIMELOCK_ADDR", address(council_timelock));

        address scroll_safe = deploySafe();
        // TODO: get timelock delay from env. for now just use 0
        address scroll_timelock = deployTimelockController(scroll_safe, 0);
        logAddress("L2_SCROLL_SAFE_ADDR", address(scroll_safe));
        logAddress("L2_SCROLL_TIMELOCK_ADDR", address(scroll_timelock));

        address forwarder = deployForwarder(address(council_timelock), address(scroll_timelock));
        // fixed copy-paste label: this is the L2 forwarder (was "L1_FORWARDER_ADDR")
        logAddress("L2_FORWARDER_ADDR", address(forwarder));

        MockTarget target = new MockTarget();
        logAddress("L2_TARGET_ADDR", address(target));

        vm.stopBroadcast();
    }

    /// @dev Deploy the Forwarder with its admin and super-admin roles.
    function deployForwarder(address admin, address superAdmin) internal returns (address) {
        Forwarder forwarder = new Forwarder(admin, superAdmin);
        return address(forwarder);
    }

    /// @dev Deploy a 1/1 Safe (singleton + proxy) owned by the deployer and
    /// return the proxy address.
    function deploySafe() internal returns (address) {
        address owner = vm.addr(L2_DEPLOYER_PRIVATE_KEY);
        // TODO: get safe signers from env
        Safe safe = new Safe();
        SafeProxy proxy = new SafeProxy(address(safe));
        address[] memory owners = new address[](1);
        owners[0] = owner;
        // deployer 1/1. no gas refunds for now
        ISafe(address(proxy)).setup(
            owners,
            1,
            address(0),
            new bytes(0),
            address(0),
            address(0),
            0,
            payable(address(0))
        );
        return address(proxy);
    }

    /// @dev Deploy a TimelockController proposed-to by `safe` with the open
    /// executor role (address(0) => anyone can execute), then hand admin
    /// rights to the safe and revoke the deployer's.
    function deployTimelockController(address safe, uint256 delay) internal returns (address) {
        address deployer = vm.addr(L2_DEPLOYER_PRIVATE_KEY);
        address[] memory proposers = new address[](1);
        proposers[0] = safe;
        address[] memory executors = new address[](1);
        executors[0] = address(0);
        // add SAFE as the only proposer, anyone can execute
        TimelockController timelock = new TimelockController(delay, proposers, executors);
        bytes32 TIMELOCK_ADMIN_ROLE = keccak256("TIMELOCK_ADMIN_ROLE");
        // make safe admin of timelock, then revoke deployer's rights
        timelock.grantRole(TIMELOCK_ADMIN_ROLE, address(safe));
        timelock.revokeRole(TIMELOCK_ADMIN_ROLE, deployer);
        return address(timelock);
    }

    /// @dev Emit a `NAME=bytes32` line for consumption by deployment tooling.
    function logBytes32(string memory name, bytes32 value) internal view {
        console.log(string(abi.encodePacked(name, "=", vm.toString(bytes32(value)))));
    }

    /// @dev Emit a `NAME=uint` line for consumption by deployment tooling.
    function logUint(string memory name, uint256 value) internal view {
        console.log(string(abi.encodePacked(name, "=", vm.toString(uint256(value)))));
    }

    /// @dev Emit a `NAME=address` line for consumption by deployment tooling.
    function logAddress(string memory name, address addr) internal view {
        console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
    }
}

View File

@@ -4,7 +4,6 @@ pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol";
import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol";
import {L2CustomERC20Gateway} from "../../src/L2/gateways/L2CustomERC20Gateway.sol";
@@ -30,10 +29,12 @@ contract DeployL2BridgeContracts is Script {
address L1_WETH_ADDR = vm.envAddress("L1_WETH_ADDR");
address L2_WETH_ADDR = vm.envAddress("L2_WETH_ADDR");
// scroll admin (timelocked) or security council
address FORWARDER = vm.envAddress("L2_FORWARDER");
L1GasPriceOracle oracle;
L1BlockContainer container;
L2MessageQueue queue;
ProxyAdmin proxyAdmin;
// predeploy contracts
address L1_BLOCK_CONTAINER_PREDEPLOY_ADDR = vm.envOr("L1_BLOCK_CONTAINER_PREDEPLOY_ADDR", address(0));
@@ -53,7 +54,6 @@ contract DeployL2BridgeContracts is Script {
deployL2Whitelist();
// upgradable
deployProxyAdmin();
deployL2ScrollMessenger();
deployL2ETHGateway();
deployL2WETHGateway();
@@ -130,17 +130,11 @@ contract DeployL2BridgeContracts is Script {
logAddress("L2_WHITELIST_ADDR", address(whitelist));
}
function deployProxyAdmin() internal {
proxyAdmin = new ProxyAdmin();
logAddress("L2_PROXY_ADMIN_ADDR", address(proxyAdmin));
}
function deployL2ScrollMessenger() internal {
L2ScrollMessenger impl = new L2ScrollMessenger(address(container), address(oracle), address(queue));
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -152,7 +146,7 @@ contract DeployL2BridgeContracts is Script {
L2StandardERC20Gateway impl = new L2StandardERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -164,7 +158,7 @@ contract DeployL2BridgeContracts is Script {
L2ETHGateway impl = new L2ETHGateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -176,7 +170,7 @@ contract DeployL2BridgeContracts is Script {
L2WETHGateway impl = new L2WETHGateway(L2_WETH_ADDR, L1_WETH_ADDR);
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -188,7 +182,7 @@ contract DeployL2BridgeContracts is Script {
L2GatewayRouter impl = new L2GatewayRouter();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -208,7 +202,7 @@ contract DeployL2BridgeContracts is Script {
L2CustomERC20Gateway impl = new L2CustomERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -220,7 +214,7 @@ contract DeployL2BridgeContracts is Script {
L2ERC721Gateway impl = new L2ERC721Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -232,7 +226,7 @@ contract DeployL2BridgeContracts is Script {
L2ERC1155Gateway impl = new L2ERC1155Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);

View File

@@ -19,7 +19,7 @@ async function main() {
console.log("Using rollup proxy address:", rollupAddr);
const ScrollChain = await ethers.getContractAt("ScrollChain", rollupAddr, deployer);
const genesis = JSON.parse(fs.readFileSync(GENESIS_FILE_PATH, "utf8"));
const genesis = JSON.parse(fs.readFileSync(GENESIS_FILE_PATH, 'utf8'));
console.log("Using genesis block:", genesis.blockHash);
const tx = await ScrollChain.importGenesisBatch(genesis);

View File

@@ -26,16 +26,16 @@ async function main() {
const L2StandardERC20FactoryAddress = process.env.L2_SCROLL_STANDARD_ERC20_FACTORY_ADDR!;
// if ((await L1StandardERC20Gateway.counterpart()) === constants.AddressZero) {
const tx = await L1StandardERC20Gateway.initialize(
L2StandardERC20GatewayAddress,
L1GatewayRouterAddress,
L1ScrollMessengerAddress,
L2StandardERC20Impl,
L2StandardERC20FactoryAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await L1StandardERC20Gateway.initialize(
L2StandardERC20GatewayAddress,
L1GatewayRouterAddress,
L1ScrollMessengerAddress,
L2StandardERC20Impl,
L2StandardERC20FactoryAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}

View File

@@ -24,14 +24,14 @@ async function main() {
const L2GatewayRouterAddress = process.env.L2_GATEWAY_ROUTER_PROXY_ADDR!;
// if ((await L1GatewayRouter.counterpart()) === constants.AddressZero) {
const tx = await L1GatewayRouter.initialize(
L1StandardERC20GatewayAddress,
L2GatewayRouterAddress,
L1ScrollMessengerAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await L1GatewayRouter.initialize(
L1StandardERC20GatewayAddress,
L2GatewayRouterAddress,
L1ScrollMessengerAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}

View File

@@ -22,10 +22,10 @@ async function main() {
const ZKRollupAddress = addressFile.get("ZKRollup.proxy");
// if ((await L1ScrollMessenger.rollup()) === constants.AddressZero) {
const tx = await L1ScrollMessenger.initialize(ZKRollupAddress);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await L1ScrollMessenger.initialize(ZKRollupAddress);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}

View File

@@ -25,15 +25,15 @@ async function main() {
const L1StandardERC20GatewayAddress = process.env.L1_STANDARD_ERC20_GATEWAY_PROXY_ADDR!;
// if ((await L2StandardERC20Gateway.counterpart()) === constants.AddressZero) {
const tx = await L2StandardERC20Gateway.initialize(
L1StandardERC20GatewayAddress,
L2GatewayRouterAddress,
L2ScrollMessengerAddress,
L2StandardERC20FactoryAddress
);
console.log("initialize L2StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await L2StandardERC20Gateway.initialize(
L1StandardERC20GatewayAddress,
L2GatewayRouterAddress,
L2ScrollMessengerAddress,
L2StandardERC20FactoryAddress
);
console.log("initialize L2StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}

View File

@@ -24,14 +24,14 @@ async function main() {
const L1GatewayRouterAddress = process.env.L1_GATEWAY_ROUTER_PROXY_ADDR!;
// if ((await L2GatewayRouter.counterpart()) === constants.AddressZero) {
const tx = await L2GatewayRouter.initialize(
L2StandardERC20GatewayAddress,
L1GatewayRouterAddress,
L2ScrollMessengerAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await L2GatewayRouter.initialize(
L2StandardERC20GatewayAddress,
L1GatewayRouterAddress,
L2ScrollMessengerAddress
);
console.log("initialize L1StandardERC20Gateway, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}

View File

@@ -21,10 +21,10 @@ async function main() {
const L2StandardERC20GatewayAddress = addressFile.get("L2StandardERC20Gateway.proxy");
// if ((await ScrollStandardERC20Factory.owner()) !== L2StandardERC20GatewayAddress) {
const tx = await ScrollStandardERC20Factory.transferOwnership(L2StandardERC20GatewayAddress);
console.log("transfer ownernship ScrollStandardERC20Factory, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
const tx = await ScrollStandardERC20Factory.transferOwnership(L2StandardERC20GatewayAddress);
console.log("transfer ownernship ScrollStandardERC20Factory, hash:", tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
// }
}

View File

@@ -10,7 +10,9 @@ export function selectAddressFile(network: string) {
}
let filename: string;
if (["hardhat", "l1geth", "l2geth"].includes(network)) {
if (["hardhat",
"l1geth", "l2geth",
].includes(network)) {
filename = path.join(CONFIG_FILE_DIR, `${network}.json`);
} else {
throw new Error(`network ${network} not supported yet`);

View File

@@ -30,6 +30,9 @@ contract L1ScrollMessenger is PausableUpgradeable, ScrollMessengerBase, IL1Scrol
* Variables *
*************/
/// @notice Mapping from relay id to relay status.
mapping(bytes32 => bool) public isL1MessageRelayed;
/// @notice Mapping from L1 message hash to sent status.
mapping(bytes32 => bool) public isL1MessageSent;
@@ -42,6 +45,28 @@ contract L1ScrollMessenger is PausableUpgradeable, ScrollMessengerBase, IL1Scrol
/// @notice The address of L1MessageQueue contract.
address public messageQueue;
// @note move to ScrollMessengerBase in next big refactor
/// @dev The status of for non-reentrant check.
uint256 private _lock_status;
/**********************
* Function Modifiers *
**********************/
modifier nonReentrant() {
// On the first call to nonReentrant, _notEntered will be true
require(_lock_status != _ENTERED, "ReentrancyGuard: reentrant call");
// Any calls to nonReentrant after this point will fail
_lock_status = _ENTERED;
_;
// By storing the original value once again, a refund is triggered (see
// https://eips.ethereum.org/EIPS/eip-2200)
_lock_status = _NOT_ENTERED;
}
/***************
* Constructor *
***************/
@@ -137,6 +162,9 @@ contract L1ScrollMessenger is PausableUpgradeable, ScrollMessengerBase, IL1Scrol
} else {
emit FailedRelayedMessage(_xDomainCalldataHash);
}
bytes32 _relayId = keccak256(abi.encodePacked(_xDomainCalldataHash, msg.sender, block.number));
isL1MessageRelayed[_relayId] = true;
}
/// @inheritdoc IL1ScrollMessenger

View File

@@ -22,32 +22,4 @@ interface IL1GatewayRouter is IL1ETHGateway, IL1ERC20Gateway {
/// @param token The address of token updated.
/// @param gateway The corresponding address of gateway updated.
event SetERC20Gateway(address indexed token, address indexed gateway);
/*************************
* Public View Functions *
*************************/
/// @notice Return the corresponding gateway address for given token address.
/// @param _token The address of token to query.
function getERC20Gateway(address _token) external view returns (address);
/************************
* Restricted Functions *
************************/
/// @notice Update the address of ETH gateway contract.
/// @dev This function should only be called by contract owner.
/// @param _ethGateway The address to update.
function setETHGateway(address _ethGateway) external;
/// @notice Update the address of default ERC20 gateway contract.
/// @dev This function should only be called by contract owner.
/// @param _defaultERC20Gateway The address to update.
function setDefaultERC20Gateway(address _defaultERC20Gateway) external;
/// @notice Update the mapping from token address to gateway address.
/// @dev This function should only be called by contract owner.
/// @param _tokens The list of addresses of tokens to update.
/// @param _gateways The list of addresses of gateways to update.
function setERC20Gateway(address[] memory _tokens, address[] memory _gateways) external;
}

View File

@@ -68,7 +68,8 @@ contract L1GatewayRouter is OwnableUpgradeable, IL1GatewayRouter {
return IL1ERC20Gateway(_gateway).getL2ERC20Address(_l1Address);
}
/// @inheritdoc IL1GatewayRouter
/// @notice Return the corresponding gateway address for given token address.
/// @param _token The address of token to query.
function getERC20Gateway(address _token) public view returns (address) {
address _gateway = ERC20Gateway[_token];
if (_gateway == address(0)) {
@@ -177,21 +178,28 @@ contract L1GatewayRouter is OwnableUpgradeable, IL1GatewayRouter {
* Restricted Functions *
************************/
/// @inheritdoc IL1GatewayRouter
/// @notice Update the address of ETH gateway contract.
/// @dev This function should only be called by contract owner.
/// @param _ethGateway The address to update.
function setETHGateway(address _ethGateway) external onlyOwner {
ethGateway = _ethGateway;
emit SetETHGateway(_ethGateway);
}
/// @inheritdoc IL1GatewayRouter
/// @notice Update the address of default ERC20 gateway contract.
/// @dev This function should only be called by contract owner.
/// @param _defaultERC20Gateway The address to update.
function setDefaultERC20Gateway(address _defaultERC20Gateway) external onlyOwner {
defaultERC20Gateway = _defaultERC20Gateway;
emit SetDefaultERC20Gateway(_defaultERC20Gateway);
}
/// @inheritdoc IL1GatewayRouter
/// @notice Update the mapping from token address to gateway address.
/// @dev This function should only be called by contract owner.
/// @param _tokens The list of addresses of tokens to update.
/// @param _gateways The list of addresses of gateways to update.
function setERC20Gateway(address[] memory _tokens, address[] memory _gateways) external onlyOwner {
require(_tokens.length == _gateways.length, "length mismatch");

Some files were not shown because too many files have changed in this diff Show More