Compare commits

..

50 Commits

Author SHA1 Message Date
dependabot[bot]
6f26114f23 build(deps): bump @openzeppelin/contracts from 4.9.2 to 4.9.3 in /contracts (#936)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-12 10:38:01 +08:00
Xi Lin
56150da353 docs(contracts): OZ-L03 Missing Docstrings (#940)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-12 10:27:54 +08:00
Péter Garamvölgyi
fc53572334 fix: mark used storage slots in L2ScrollMessenger (#941) 2023-09-11 15:35:18 +02:00
georgehao
0730e91292 feat(observability):add ready&health check api for k8s (#938)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2023-09-11 19:22:19 +08:00
Xi Lin
4d3ff66446 fix(contracts): OZ-L02 Lack of gap Variable (#929)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-11 11:06:09 +08:00
Xi Lin
ae1cb30ed1 fix(contracts): OZ-M01 L2USDCGateway Is Missing Rate Limiter Functionality (#927)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-11 10:47:14 +08:00
Xi Lin
ad14836796 fix(contracts): Zellic 3.2 Addtional checks could be performed (#892)
Co-authored-by: zimpha <zimpha@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-11 10:36:54 +08:00
HAOYUatHZ
9a5517472f perf(db): add idx for chunk_index and batch_index (#937) 2023-09-11 10:03:17 +08:00
colin
1c7490a88e refactor(rollup-relayer): change minGasLimit to fallbackGasLimit (#939)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2023-09-08 13:25:40 -07:00
Xi Lin
7559dc42b4 feat(contacts): use bitmap for skipped messages (#893)
Co-authored-by: zimpha <zimpha@users.noreply.github.com>
2023-09-07 14:31:06 +08:00
ChuhanJin
49b72bd4e4 docs(bridge-history-api): add readme contents (#922)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <haoyu@protonmail.com>
2023-09-07 13:32:43 +08:00
colin
8c41b0b86b test(rollup-relayer): add chunk and batch proposer limit tests (#932)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2023-09-07 13:29:08 +08:00
HAOYUatHZ
cd456ee3db feat(prover, coordinator): update vk handling logic (#931)
Co-authored-by: Steven Gu <asongala@163.com>
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
2023-09-07 10:59:47 +08:00
Steven
2b6a3c9874 feat: libzkp v0.8.1 (#894)
Co-authored-by: silathdiir <silathdiir@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <haoyu@protonmail.com>
2023-09-07 10:34:35 +08:00
colin
d39db01c5b fix(coordinator): decrease active attempts zero check (#934)
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
2023-09-06 18:22:25 +08:00
colin
90e3dddf91 fix: bump version (#933) 2023-09-06 16:08:41 +08:00
georgehao
c6cc51bcfd feat(coordinator): prover task assigned multiple prover (#871)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-06 15:28:31 +08:00
Xi Lin
0b0b84a513 fix(contracts): OZ-N01 Unused Imports (#930)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-06 13:34:29 +08:00
gsovereignty
83c8071123 docs: fix link in readme (#926)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-06 12:00:07 +08:00
Xi Lin
e35de74495 docs(contracts): OZ-L01 Misleading Comment (#928) 2023-09-06 11:59:35 +08:00
Ahmed Castro
33089b829f docs: add code-of-conduct and contributing documents (#722)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: Haichen Shen <shenhaichen@gmail.com>
2023-09-05 08:16:31 -07:00
colin
8e27052b36 fix(l1-watcher): soft delete blocks when reorg (#923)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2023-09-05 16:32:48 +08:00
colin
25b956f9b5 fix(gas-oracle): fetch base fee from the latest L1 block (#920)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2023-09-05 15:21:06 +08:00
colin
f4663fd249 feat(rollup-relayer): add number of blocks per chunk limit (#880)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
2023-09-05 15:00:16 +08:00
Péter Garamvölgyi
c71fa5a5fc style: use MAX_TX_IN_CHUNK env name in deployment scripts (#921) 2023-09-04 14:35:04 +02:00
Péter Garamvölgyi
4af3834e36 test(fee-vault): add new test testCantWithdrawMoreThanBalance (#918)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-04 08:15:26 +02:00
Haichen Shen
9983585bdd docs: Update readme (#919) 2023-09-04 13:56:43 +08:00
Xi Lin
d288b34536 fix(contracts): OZ-L02 Anyone Can Steal ERC-20 Tokens From GasSwap (#844)
Co-authored-by: Haichen Shen <shenhaichen@gmail.com>
Co-authored-by: icemelon <icemelon@users.noreply.github.com>
Co-authored-by: zimpha <zimpha@users.noreply.github.com>
2023-09-04 12:31:41 +08:00
Haichen Shen
a2fe246551 docs(rollup): improve readme (#917)
Co-authored-by: HAOYUatHZ <haoyu@protonmail.com>
2023-09-02 23:23:17 +08:00
Péter Garamvölgyi
8699a22fa3 feat(contracts): allow setting withdraw amount in fee vault (#912)
Co-authored-by: Haichen Shen <shenhaichen@gmail.com>
2023-09-02 02:23:53 -07:00
Xi Lin
d668180e9a fix(contracts): OZ-M04 Use of Non-Production-Ready Trusted Forwarder (#843)
Co-authored-by: Haichen Shen <shenhaichen@gmail.com>
Co-authored-by: icemelon <icemelon@users.noreply.github.com>
2023-09-02 02:23:31 -07:00
HAOYUatHZ
d3c2e34650 docs(rollup): fix README (#916) 2023-09-02 15:54:26 +08:00
HAOYUatHZ
38551c4eeb refactor: rename bridge to rollup (#644)
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
2023-09-02 15:42:01 +08:00
HAOYUatHZ
82330621ce docs(): update README 2023-09-02 15:08:37 +08:00
maskpp
237d048ce6 feat(prover): update l2geth version and enable use compression when get trace (#915)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
2023-09-02 14:38:47 +08:00
Haichen Shen
38b440b1fa docs(contract): Update readme (#378)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: Orest Tarasiuk <830847+OrestTa@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2023-09-01 20:22:09 -07:00
Péter Garamvölgyi
0ed3d68fc3 ci: remove message_relayer Github Action (#914) 2023-09-01 19:44:36 +02:00
Nazarii Denha
2847d02498 refactor(bridge): remove message-relayer and related code (#670)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: NazariiDenha <NazariiDenha@users.noreply.github.com>
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
2023-09-01 19:04:09 +02:00
Xi Lin
a7d9bf3488 fix(contracts): OZ-N05 Lack of Logs on Sensitive Actions (#908)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-01 22:55:11 +08:00
Xi Lin
255eb5f307 fix(contracts): OZ-L02 Utilizing Deprecated Function From Library (#902)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-01 22:42:20 +08:00
Haichen Shen
0955382aec fix(ci): enable the bump-version bot when PR is updated (#905)
Co-authored-by: icemelon <icemelon@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-01 22:41:23 +08:00
Xi Lin
07961f751e fix(contracts): OZ-N01 Variable Cast is Unnecessary (#903)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-01 22:18:17 +08:00
Xi Lin
bbd5a5a9c2 fix(contracts): OZ-N02 Incorrect Function Visibility (#904)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-01 21:58:47 +08:00
Xi Lin
2e1f42fcb6 fix(contracts): OZ-N04 Implicit Type Casting (#907)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-01 19:45:35 +08:00
Xi Lin
bcdbe1f119 fix(contracts): OZ-N07 Naming Issue (#910)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-01 19:29:57 +08:00
Xi Lin
6034c43bb1 fix(contracts): OZ-N06 Unpinned Compiler Version (#909) 2023-09-01 19:20:10 +08:00
colin
f9da81d587 feat(rollup-relayer): use db gas estimation as min gas limit (#901)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: Xi Lin <zimpha@gmail.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-09-01 14:54:57 +08:00
Xi Lin
38f64e70b7 feat(contracts): bench test for ScrollChain (#721)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
Co-authored-by: Haichen Shen <shenhaichen@gmail.com>
Co-authored-by: zimpha <zimpha@users.noreply.github.com>
Co-authored-by: icemelon <icemelon@users.noreply.github.com>
2023-08-31 23:52:50 -07:00
Haichen Shen
44b924170a fix(ci): don't trigger CI for version change (#898)
Co-authored-by: icemelon <icemelon@users.noreply.github.com>
2023-08-31 23:06:32 -07:00
georgehao
227f09a2cf feat(coordinator): pretty log (#900)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <haoyu@protonmail.com>
2023-09-01 12:56:03 +08:00
245 changed files with 4382 additions and 2665 deletions

View File

@@ -20,7 +20,7 @@ Your PR title must follow [conventional commits](https://www.conventionalcommits
### Deployment tag versioning
Has `tag` in `common/version.go` been updated?
Has `tag` in `common/version.go` been updated or have you added `bump-version` label to this PR?
- [ ] No, this PR doesn't involve a new deployment, git tag, docker image tag
- [ ] Yes

View File

@@ -1,16 +1,18 @@
name: Bump Version
name: Bump version
on:
pull_request:
branches: [develop]
branches: [ develop ]
types:
- opened
- reopened
- synchronize
- ready_for_review
- labeled
jobs:
try-to-bump:
if: contains(github.event.pull_request.labels.*.name, 'bump-version')
runs-on: ubuntu-latest
steps:
- name: Checkout code

View File

@@ -9,6 +9,7 @@ on:
- alpha
paths:
- 'common/**'
- '!common/version/version.go'
- '.github/workflows/common.yml'
pull_request:
types:
@@ -18,6 +19,7 @@ on:
- ready_for_review
paths:
- 'common/**'
- '!common/version/version.go'
- '.github/workflows/common.yml'
jobs:

View File

@@ -10,6 +10,7 @@ on:
paths:
- 'coordinator/**'
- 'common/**'
- '!common/version/version.go'
- 'database/**'
- '.github/workflows/coordinator.yml'
pull_request:
@@ -21,6 +22,7 @@ on:
paths:
- 'coordinator/**'
- 'common/**'
- '!common/version/version.go'
- 'database/**'
- '.github/workflows/coordinator.yml'

View File

@@ -10,6 +10,7 @@ on:
paths:
- 'database/**'
- 'common/**'
- '!common/version/version.go'
- '.github/workflows/database.yml'
pull_request:
types:
@@ -20,6 +21,7 @@ on:
paths:
- 'database/**'
- 'common/**'
- '!common/version/version.go'
- '.github/workflows/database.yml'
jobs:

View File

@@ -48,27 +48,6 @@ jobs:
tags: scrolltech/gas-oracle:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
msg_relayer:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push msg_relayer docker
uses: docker/build-push-action@v2
with:
context: .
file: ./build/dockerfiles/msg_relayer.Dockerfile
push: true
tags: scrolltech/msg-relayer:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
rollup_relayer:
runs-on: ubuntu-latest
steps:

View File

@@ -36,7 +36,7 @@ jobs:
- name: Build prerequisites
run: |
make dev_docker
make -C bridge mock_abi
make -C rollup mock_abi
make -C common/bytecode all
- name: Run integration tests
run: |

View File

@@ -1,4 +1,4 @@
name: Bridge
name: Rollup
on:
push:
@@ -8,10 +8,11 @@ on:
- develop
- alpha
paths:
- 'bridge/**'
- 'rollup/**'
- 'common/**'
- '!common/version/version.go'
- 'database/**'
- '.github/workflows/bridge.yml'
- '.github/workflows/rollup.yml'
pull_request:
types:
- opened
@@ -19,10 +20,11 @@ on:
- synchronize
- ready_for_review
paths:
- 'bridge/**'
- 'rollup/**'
- 'common/**'
- '!common/version/version.go'
- 'database/**'
- '.github/workflows/bridge.yml'
- '.github/workflows/rollup.yml'
jobs:
check:
@@ -44,7 +46,7 @@ jobs:
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
- name: Lint
working-directory: 'bridge'
working-directory: 'rollup'
run: |
rm -rf $HOME/.cache/golangci-lint
make mock_abi
@@ -62,14 +64,14 @@ jobs:
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- name: Run goimports lint
run: goimports -local scroll-tech/bridge/ -w .
working-directory: 'bridge'
run: goimports -local scroll-tech/rollup/ -w .
working-directory: 'rollup'
- name: Run go mod tidy
run: go mod tidy
working-directory: 'bridge'
working-directory: 'rollup'
# If there are any diffs from goimports or go mod tidy, fail.
- name: Verify no changes from goimports and go mod tidy
working-directory: 'bridge'
working-directory: 'rollup'
run: |
if [ -n "$(git status --porcelain)" ]; then
exit 1
@@ -95,13 +97,13 @@ jobs:
- name: Build prerequisites
run: |
make dev_docker
make -C bridge mock_abi
- name: Build bridge binaries
working-directory: 'bridge'
make -C rollup mock_abi
- name: Build rollup binaries
working-directory: 'rollup'
run: |
make bridge_bins
- name: Test bridge packages
working-directory: 'bridge'
make rollup_bins
- name: Test rollup packages
working-directory: 'rollup'
run: |
go test -v -race -gcflags="-l" -ldflags="-s=false" -coverprofile=coverage.txt -covermode=atomic ./...
- name: Upload coverage reports to Codecov
@@ -109,7 +111,7 @@ jobs:
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: bridge
flags: rollup
# docker-build:
# if: github.event.pull_request.draft == false
# runs-on: ubuntu-latest

17
.gitignore vendored
View File

@@ -1,9 +1,22 @@
.idea
# Asset files
assets/params*
assets/seed
coverage.txt
# Built binaries
build/bin
coverage.txt
*.integration.txt
# Visual Studio Code
.vscode
# IntelliJ
.idea
# MacOS
.DS_Store
# misc
sftp-config.json
*~

128
CODE_OF_CONDUCT.md Normal file
View File

@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
contributor@scroll.io.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

42
CONTRIBUTING.md Normal file
View File

@@ -0,0 +1,42 @@
## Contributing
[fork]: /fork
[pr]: /compare
[style]: https://standardjs.com/
[code-of-conduct]: CODE_OF_CONDUCT.md
Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great.
Please note that this project is released with a [Contributor Code of Conduct][code-of-conduct]. By participating in this project you agree to abide by its terms.
## Contribute to Scroll
Did you know there are many ways of contributing to Scroll? If you are looking to contribute by adding Scroll to existing Dev Tools or by doing integrations, please go to the [Contribute to Scroll](https://github.com/scroll-tech/contribute-to-scroll) repo instead. If you are looking to contribute to Scroll's Halo2 zkEVM circuits, please refer to the [zkEVM circuits](https://github.com/scroll-tech/zkevm-circuits) repo. This repository covers the Scroll infrastructure and smart contracts; if you want to contribute to these areas, continue reading this document.
## Issues and PRs
If you have suggestions for how this project could be improved, or want to report a bug, open an issue! We'd love all and any contributions. If you have questions, too, we'd love to hear them.
We'd also love PRs. If you're thinking of a large PR, we advise opening up an issue first to talk about it, though! Look at the links below if you're not sure how to open a PR.
## Submitting a pull request
1. [Fork][fork] and clone the repository.
1. Create a new branch: `git checkout -b my-branch-name`.
1. Make your change, add tests, and make sure the tests still pass.
1. Push to your fork and [submit a pull request][pr].
1. Pat yourself on the back and wait for your pull request to be reviewed and merged.
Here are a few things you can do that will increase the likelihood of your pull request being accepted:
- Write and update tests.
- Keep your changes as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
- Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
Work in Progress pull requests are also welcome to get feedback early on, or if there is something that blocked you.
## Resources
- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
- [Using Pull Requests](https://help.github.com/articles/about-pull-requests/)
- [GitHub Help](https://help.github.com)

View File

@@ -1,6 +1,6 @@
.PHONY: check update dev_docker build_test_docker run_test_docker clean
L2GETH_TAG=scroll-v4.3.34
L2GETH_TAG=scroll-v4.3.55
help: ## Display this help message
@grep -h \
@@ -8,7 +8,7 @@ help: ## Display this help message
awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
lint: ## The code's format and security checks.
make -C bridge lint
make -C rollup lint
make -C common lint
make -C coordinator lint
make -C database lint
@@ -17,7 +17,7 @@ lint: ## The code's format and security checks.
update: ## update dependencies
go work sync
cd $(PWD)/bridge/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/rollup/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/bridge-history-api/ && go get -u github.com/ethereum/go-ethereum@latest && go mod tidy
cd $(PWD)/common/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/coordinator/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
@@ -25,7 +25,7 @@ update: ## update dependencies
cd $(PWD)/prover/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/prover-stats-api/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/tests/integration-test/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
goimports -local $(PWD)/bridge/ -w .
goimports -local $(PWD)/rollup/ -w .
goimports -local $(PWD)/bridge-history-api/ -w .
goimports -local $(PWD)/common/ -w .
goimports -local $(PWD)/coordinator/ -w .

View File

@@ -1,7 +1,34 @@
# Scroll Monorepo
[![rollup](https://github.com/scroll-tech/scroll/actions/workflows/rollup.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/rollup.yml)
[![contracts](https://github.com/scroll-tech/scroll/actions/workflows/contracts.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/contracts.yml)
[![bridge-history](https://github.com/scroll-tech/scroll/actions/workflows/bridge_history_api.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/bridge_history_api.yml)
[![coordinator](https://github.com/scroll-tech/scroll/actions/workflows/coordinator.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/coordinator.yml)
[![prover](https://github.com/scroll-tech/scroll/actions/workflows/prover.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/prover.yml)
[![integration](https://github.com/scroll-tech/scroll/actions/workflows/integration.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/integration.yml)
[![codecov](https://codecov.io/gh/scroll-tech/scroll/branch/develop/graph/badge.svg?token=VJVHNQWGGW)](https://codecov.io/gh/scroll-tech/scroll)
<a href="https://scroll.io">Scroll</a> is a zkRollup Layer 2 dedicated to enhance Ethereum scalability through a bytecode-equivalent [zkEVM](https://github.com/scroll-tech/zkevm-circuits) circuit. This monorepo encompasses essential infrastructure components of the Scroll protocol. It contains the L1 and L2 contracts, the rollup node, the prover client, and the prover coordinator.
## Directory Structure
<pre>
├── <a href="./bridge-history-api/">bridge-history-api</a>: Bridge history service that collects deposit and withdraw events from both L1 and L2 chain and generates withdrawal proofs
├── <a href="./common/">common</a>: Common libraries and types
├── <a href="./coordinator/">coordinator</a>: Prover coordinator service that dispatches proving tasks to provers
├── <a href="./database">database</a>: Database client and schema definition
├── <a href="./src">l2geth</a>: Scroll execution node
├── <a href="./prover">prover</a>: Prover client that runs proof generation for zkEVM circuit and aggregation circuit
├── <a href="./prover-stats-api">prover-stats-api</a>: Collect and show prover statistics
├── <a href="./rollup">rollup</a>: Rollup-related services
├── <a href="./rpc-gateway">rpc-gateway</a>: RPC gateway external repo
└── <a href="./tests">tests</a>: Integration tests
</pre>
## Contributing
We welcome community contributions to this repository. Before you submit any issues or PRs, please read the [Code of Conduct](CODE_OF_CONDUCT.md) and the [Contribution Guideline](CONTRIBUTING.md).
## Prerequisites
+ Go 1.19
+ Rust (for version, see [rust-toolchain](./common/libzkp/impl/rust-toolchain))
@@ -15,14 +42,14 @@ docker pull postgres
make dev_docker
```
## Testing Bridge & Coordinator
## Testing Rollup & Coordinator
### For Non-Apple Silicon (M1/M2) Macs
Run the tests using the following commands:
```bash
go test -v -race -covermode=atomic scroll-tech/bridge/...
go test -v -race -covermode=atomic scroll-tech/rollup/...
go test -tags="mock_verifier" -v -race -covermode=atomic scroll-tech/coordinator/...
go test -v -race -covermode=atomic scroll-tech/database/...
go test -v -race -covermode=atomic scroll-tech/common/...
@@ -55,7 +82,7 @@ This command runs a Docker container named `scroll_test_container` from the `scr
Once the Docker container is running, execute the tests using the following commands:
```bash
go test -v -race -covermode=atomic scroll-tech/bridge/...
go test -v -race -covermode=atomic scroll-tech/rollup/...
go test -tags="mock_verifier" -v -race -covermode=atomic scroll-tech/coordinator/...
go test -v -race -covermode=atomic scroll-tech/database/...
go test -v -race -covermode=atomic scroll-tech/common/...
@@ -63,6 +90,10 @@ go test -v -race -covermode=atomic scroll-tech/common/...
## Testing Contracts
You can find the unit tests in [`<REPO_DIR>/contracts/src/test/`](/contracts/src/test/), and integration tests in [`<REPO_DIR>/contracts/integration-test/`](/contracts/integration-test/).
You can find the unit tests in [`contracts/src/test/`](/contracts/src/test/), and integration tests in [`contracts/integration-test/`](/contracts/integration-test/).
For more details on contracts, see [`/contracts`](/contracts).
See [`contracts`](/contracts) for more details on the contracts.
## License
Scroll Monorepo is licensed under the [MIT](./LICENSE) license.

View File

@@ -1 +1,82 @@
# bridge-history-api
This directory contains the `bridge-history-api` service, which provides REST APIs to query transactions that interact with Scroll's official bridge contracts.
## Instructions
The bridge-history-api contains three distinct components
### bridgehistoryapi-db-cli
Provides database services: initialization, showing the version, rollback, and status checks.
```
cd ./bridge-history-api
make bridgehistoryapi-db-cli
./build/bin/bridgehistoryapi-db-cli [command]
```
### bridgehistoryapi-cross-msg-fetcher
Fetches transactions from both L1 and L2.
```
cd ./bridge-history-api
make bridgehistoryapi-cross-msg-fetcher
./build/bin/bridgehistoryapi-cross-msg-fetcher
```
### bridgehistoryapi-server
Provides REST APIs. Please refer to the API details below.
```
cd ./bridge-history-api
make bridgehistoryapi-server
./build/bin/bridgehistoryapi-server
```
## APIs provided by bridgehistoryapi-server
Assume `bridgehistoryapi-server` is listening on `https://localhost:8080`.
You can change this port by modifying `config.json`.
1. `/txs`
```
// @Summary get all txs under given address
// @Accept plain
// @Produce plain
// @Param address query string true "wallet address"
// @Param page_size query int true "page size"
// @Param page query int true "page"
// @Success 200
// @Router /api/txs [get]
```
2. `/txsbyhashes`
```
// @Summary get txs by given tx hashes
// @Accept plain
// @Produce plain
// @Param hashes query string array true "array of hashes list"
// @Success 200
// @Router /api/txsbyhashes [post]
```
3. `/claimable`
```
// @Summary get all claimable txs under given address
// @Accept plain
// @Produce plain
// @Param address query string true "wallet address"
// @Param page_size query int true "page size"
// @Param page query int true "page"
// @Success 200
// @Router /api/claimable [get]
```
4. `/withdraw_root`
```
// @Summary get withdraw_root of given batch index
// @Accept plain
// @Produce plain
// @Param batch_index query string true "batch_index"
// @Success 200
// @Router /api/withdraw_root [get]
```

View File

@@ -89,9 +89,6 @@ var (
// L2FailedRelayedMessageEventSignature = keccak256("FailedRelayedMessage(bytes32)")
L2FailedRelayedMessageEventSignature common.Hash
// L2ImportBlockEventSignature = keccak256("ImportBlock(bytes32,uint256,uint256,uint256,bytes32)")
L2ImportBlockEventSignature common.Hash
// L2AppendMessageEventSignature = keccak256("AppendMessage(uint256,bytes32)")
L2AppendMessageEventSignature common.Hash
)
@@ -153,8 +150,6 @@ func init() {
L2RelayedMessageEventSignature = L2ScrollMessengerABI.Events["RelayedMessage"].ID
L2FailedRelayedMessageEventSignature = L2ScrollMessengerABI.Events["FailedRelayedMessage"].ID
L2ImportBlockEventSignature = L1BlockContainerABI.Events["ImportBlock"].ID
L2AppendMessageEventSignature = L2MessageQueueABI.Events["AppendMessage"].ID
}

View File

@@ -116,7 +116,7 @@ require (
golang.org/x/arch v0.4.0 // indirect
golang.org/x/crypto v0.12.0 // indirect
golang.org/x/exp v0.0.0-20230810033253-352e893a4cad // indirect
golang.org/x/net v0.12.0 // indirect
golang.org/x/net v0.14.0 // indirect
golang.org/x/sync v0.3.0 // indirect
golang.org/x/sys v0.11.0 // indirect
golang.org/x/text v0.12.0 // indirect

View File

@@ -531,8 +531,8 @@ golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96b
golang.org/x/net v0.0.0-20211008194852-3b03d305991f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50=
golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14=
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=

View File

@@ -24,14 +24,14 @@ func NewBatchController(db *gorm.DB) *BatchController {
func (b *BatchController) GetWithdrawRootByBatchIndex(ctx *gin.Context) {
var req types.QueryByBatchIndexRequest
if err := ctx.ShouldBind(&req); err != nil {
types.RenderJSON(ctx, types.ErrParameterInvalidNo, err, nil)
types.RenderFailure(ctx, types.ErrParameterInvalidNo, err)
return
}
result, err := b.batchLogic.GetWithdrawRootByBatchIndex(ctx, req.BatchIndex)
if err != nil {
types.RenderJSON(ctx, types.ErrGetWithdrawRootByBatchIndexFailure, err, nil)
types.RenderFailure(ctx, types.ErrGetWithdrawRootByBatchIndexFailure, err)
return
}
types.RenderJSON(ctx, types.Success, nil, result)
types.RenderSuccess(ctx, result)
}

View File

@@ -10,7 +10,12 @@ var (
// HistoryCtrler is controller instance
HistoryCtrler *HistoryController
// BatchCtrler is controller instance
BatchCtrler *BatchController
BatchCtrler *BatchController
// HealthCheck the health check controller
HealthCheck *HealthCheckController
// Ready the ready controller
Ready *ReadyController
initControllerOnce sync.Once
)
@@ -19,5 +24,7 @@ func InitController(db *gorm.DB) {
initControllerOnce.Do(func() {
HistoryCtrler = NewHistoryController(db)
BatchCtrler = NewBatchController(db)
HealthCheck = NewHealthCheckController(db)
Ready = NewReadyController()
})
}

View File

@@ -0,0 +1,30 @@
package controller
import (
"github.com/gin-gonic/gin"
"gorm.io/gorm"
"bridge-history-api/internal/types"
"bridge-history-api/utils"
)
// HealthCheckController is health check API
type HealthCheckController struct {
db *gorm.DB
}
// NewHealthCheckController returns an HealthCheckController instance
func NewHealthCheckController(db *gorm.DB) *HealthCheckController {
return &HealthCheckController{
db: db,
}
}
// HealthCheck the api controller for coordinator health check
func (a *HealthCheckController) HealthCheck(c *gin.Context) {
if _, err := utils.Ping(a.db); err != nil {
types.RenderFatal(c, err)
return
}
types.RenderSuccess(c, nil)
}

View File

@@ -25,18 +25,18 @@ func NewHistoryController(db *gorm.DB) *HistoryController {
func (c *HistoryController) GetAllClaimableTxsByAddr(ctx *gin.Context) {
var req types.QueryByAddressRequest
if err := ctx.ShouldBind(&req); err != nil {
types.RenderJSON(ctx, types.ErrParameterInvalidNo, err, nil)
types.RenderFailure(ctx, types.ErrParameterInvalidNo, err)
return
}
offset := (req.Page - 1) * req.PageSize
limit := req.PageSize
txs, total, err := c.historyLogic.GetClaimableTxsByAddress(ctx, common.HexToAddress(req.Address), offset, limit)
if err != nil {
types.RenderJSON(ctx, types.ErrGetClaimablesFailure, err, nil)
types.RenderFailure(ctx, types.ErrGetClaimablesFailure, err)
return
}
types.RenderJSON(ctx, types.Success, nil, &types.ResultData{Result: txs, Total: total})
types.RenderSuccess(ctx, &types.ResultData{Result: txs, Total: total})
}
// GetAllTxsByAddr defines the http get method behavior
@@ -50,23 +50,23 @@ func (c *HistoryController) GetAllTxsByAddr(ctx *gin.Context) {
limit := req.PageSize
message, total, err := c.historyLogic.GetTxsByAddress(ctx, common.HexToAddress(req.Address), offset, limit)
if err != nil {
types.RenderJSON(ctx, types.ErrGetTxsByAddrFailure, err, nil)
types.RenderFailure(ctx, types.ErrGetTxsByAddrFailure, err)
return
}
types.RenderJSON(ctx, types.Success, nil, &types.ResultData{Result: message, Total: total})
types.RenderSuccess(ctx, &types.ResultData{Result: message, Total: total})
}
// PostQueryTxsByHash defines the http post method behavior
func (c *HistoryController) PostQueryTxsByHash(ctx *gin.Context) {
var req types.QueryByHashRequest
if err := ctx.ShouldBindJSON(&req); err != nil {
types.RenderJSON(ctx, types.ErrParameterInvalidNo, err, nil)
types.RenderFailure(ctx, types.ErrParameterInvalidNo, err)
return
}
result, err := c.historyLogic.GetTxsByHashes(ctx, req.Txs)
if err != nil {
types.RenderJSON(ctx, types.ErrGetTxsByHashFailure, err, nil)
types.RenderFailure(ctx, types.ErrGetTxsByHashFailure, err)
return
}
types.RenderJSON(ctx, types.Success, nil, &types.ResultData{Result: result, Total: 0})
types.RenderSuccess(ctx, &types.ResultData{Result: result, Total: 0})
}

View File

@@ -0,0 +1,21 @@
package controller
import (
"github.com/gin-gonic/gin"
"bridge-history-api/internal/types"
)
// ReadyController ready API
type ReadyController struct {
}
// NewReadyController returns an ReadyController instance
func NewReadyController() *ReadyController {
return &ReadyController{}
}
// Ready the api controller for coordinator ready
func (r *ReadyController) Ready(c *gin.Context) {
types.RenderSuccess(c, nil)
}

View File

@@ -25,4 +25,6 @@ func Route(router *gin.Engine, conf *config.Config) {
r.POST("/txsbyhashes", controller.HistoryCtrler.PostQueryTxsByHash)
r.GET("/claimable", controller.HistoryCtrler.GetAllClaimableTxsByAddr)
r.GET("/withdraw_root", controller.BatchCtrler.GetWithdrawRootByBatchIndex)
r.GET("/health", controller.HealthCheck.HealthCheck)
r.GET("/ready", controller.Ready.Ready)
}

View File

@@ -10,6 +10,8 @@ import (
const (
// Success shows OK.
Success = 0
// InternalServerError shows a fatal error in the server
InternalServerError = 500
// ErrParameterInvalidNo is invalid params
ErrParameterInvalidNo = 40001
// ErrGetClaimablesFailure is getting all claimables txs error
@@ -103,3 +105,28 @@ func RenderJSON(ctx *gin.Context, errCode int, err error, data interface{}) {
}
ctx.JSON(http.StatusOK, renderData)
}
// RenderSuccess renders success response with json: the payload is wrapped in
// the standard Response envelope with error code Success and no error message.
func RenderSuccess(ctx *gin.Context, data interface{}) {
	RenderJSON(ctx, Success, nil, data)
}
// RenderFailure renders failure response with json: the given business error
// code and error are wrapped in the standard Response envelope with no data.
func RenderFailure(ctx *gin.Context, errCode int, err error) {
	RenderJSON(ctx, errCode, err, nil)
}
// RenderFatal renders fatal response with json: the error message (if any) is
// wrapped in a Response with code InternalServerError, the error code is
// stashed on the gin context under "errcode", and HTTP 500 is returned.
func RenderFatal(ctx *gin.Context, err error) {
	errMsg := ""
	if err != nil {
		errMsg = err.Error()
	}
	ctx.Set("errcode", InternalServerError)
	ctx.JSON(http.StatusInternalServerError, Response{
		ErrCode: InternalServerError,
		ErrMsg:  errMsg,
		Data:    nil,
	})
}

View File

@@ -2,6 +2,7 @@ package utils
import (
"context"
"database/sql"
"fmt"
"time"
@@ -67,18 +68,29 @@ func InitDB(config *config.DBConfig) (*gorm.DB, error) {
if err != nil {
return nil, err
}
sqlDB, err := db.DB()
if err != nil {
return nil, err
sqlDB, pingErr := Ping(db)
if pingErr != nil {
return nil, pingErr
}
sqlDB.SetMaxOpenConns(config.MaxOpenNum)
sqlDB.SetMaxIdleConns(config.MaxIdleNum)
return db, nil
}
// Ping check db status
func Ping(db *gorm.DB) (*sql.DB, error) {
sqlDB, err := db.DB()
if err != nil {
return nil, err
}
if err = sqlDB.Ping(); err != nil {
return nil, err
}
return db, nil
return sqlDB, nil
}
// CloseDB close the db handler. notice the db handler only can close when then program exit.

View File

@@ -1,36 +0,0 @@
# Bridge
This repo contains the Scroll bridge.
In addition, launching the bridge will launch a separate instance of l2geth and set up a communication channel
between the two over JSON-RPC sockets.
Note that all private keys used inside a sender instance must be unique; duplicates are not allowed.
## Dependency
+ install `abigen`
``` bash
go install -v github.com/scroll-tech/go-ethereum/cmd/abigen
```
## Build
```bash
make clean
make bridge
```
## Start
* use default ports and config.json
```bash
./build/bin/bridge --http
```
* use specified ports and config.json
```bash
./build/bin/bridge --config ./config.json --http --http.addr localhost --http.port 8290
```

View File

@@ -1,7 +0,0 @@
package main

import "scroll-tech/bridge/cmd/event_watcher/app"

// main is a thin entry point for the event_watcher binary; all flag parsing
// and service startup live in app.Run.
func main() {
	app.Run()
}

View File

@@ -1,7 +0,0 @@
package main

import "scroll-tech/bridge/cmd/gas_oracle/app"

// main is a thin entry point for the gas_oracle binary; all flag parsing and
// service startup live in app.Run.
func main() {
	app.Run()
}

View File

@@ -1,94 +0,0 @@
// Package app wires up and runs the standalone message-relayer service.
package app

import (
	"context"
	"fmt"
	"os"
	"os/signal"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/scroll-tech/go-ethereum/log"
	"github.com/urfave/cli/v2"

	"scroll-tech/common/database"
	"scroll-tech/common/metrics"
	"scroll-tech/common/utils"
	"scroll-tech/common/version"

	"scroll-tech/bridge/internal/config"
	"scroll-tech/bridge/internal/controller/relayer"
)

// app is the CLI application shared by init, action and Run.
var app *cli.App

func init() {
	// Set up message-relayer app info.
	app = cli.NewApp()
	app.Action = action
	app.Name = "message-relayer"
	app.Usage = "The Scroll Message Relayer"
	app.Description = "Message Relayer contains two main service: 1) relay l1 message to l2. 2) relay l2 message to l1."
	app.Version = version.Version
	app.Flags = append(app.Flags, utils.CommonFlags...)
	app.Commands = []*cli.Command{}
	app.Before = func(ctx *cli.Context) error {
		return utils.LogSetup(ctx)
	}
	// Register `message-relayer-test` app for integration-test.
	utils.RegisterSimulation(app, utils.MessageRelayerApp)
}

// action is the entry invoked by the CLI framework: it loads the config file,
// opens the DB, starts the metrics server and the L1 relay loop, then blocks
// until an interrupt signal arrives.
func action(ctx *cli.Context) error {
	// Load config file.
	cfgFile := ctx.String(utils.ConfigFileFlag.Name)
	cfg, err := config.NewConfig(cfgFile)
	if err != nil {
		// log.Crit exits the process, so no return is needed here.
		log.Crit("failed to load config file", "config file", cfgFile, "error", err)
	}
	subCtx, cancel := context.WithCancel(ctx.Context)
	// Init db connection
	db, err := database.InitDB(cfg.DBConfig)
	if err != nil {
		log.Crit("failed to init db connection", "err", err)
	}
	// Cancel the loop context first so the relay loop stops before the DB
	// handle it uses is closed.
	defer func() {
		cancel()
		if err = database.CloseDB(db); err != nil {
			log.Error("can not close ormFactory", "error", err)
		}
	}()
	registry := prometheus.DefaultRegisterer
	metrics.Server(ctx, registry.(*prometheus.Registry))
	l1relayer, err := relayer.NewLayer1Relayer(ctx.Context, db, cfg.L1Config.RelayerConfig, registry)
	if err != nil {
		log.Error("failed to create new l1 relayer", "config file", cfgFile, "error", err)
		return err
	}
	// Start l1relayer process
	// NOTE(review): the app description mentions relaying in both directions,
	// but only the L1->L2 loop is started here — confirm this is intentional.
	go utils.Loop(subCtx, 10*time.Second, l1relayer.ProcessSavedEvents)
	// Finish start all message relayer functions
	log.Info("Start message-relayer successfully")

	// Catch CTRL-C to ensure a graceful shutdown.
	interrupt := make(chan os.Signal, 1)
	signal.Notify(interrupt, os.Interrupt)

	// Wait until the interrupt signal is received from an OS signal.
	<-interrupt
	return nil
}

// Run message_relayer cmd instance; exits with status 1 on any CLI error.
func Run() {
	if err := app.Run(os.Args); err != nil {
		_, _ = fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}

View File

@@ -1,7 +0,0 @@
package main

import "scroll-tech/bridge/cmd/msg_relayer/app"

// main is a thin entry point for the msg_relayer binary; all flag parsing and
// service startup live in app.Run.
func main() {
	app.Run()
}

View File

@@ -1,7 +0,0 @@
package main

import "scroll-tech/bridge/cmd/rollup_relayer/app"

// main is a thin entry point for the rollup_relayer binary; all flag parsing
// and service startup live in app.Run.
func main() {
	app.Run()
}

View File

@@ -1,54 +0,0 @@
package relayer

import (
	"sync"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
)

// l1RelayerMetrics bundles all Prometheus metrics emitted by the L1 relayer;
// each field's meaning is given by its Name/Help in initL1RelayerMetrics.
type l1RelayerMetrics struct {
	bridgeL1RelayedMsgsTotal               prometheus.Counter
	bridgeL1RelayedMsgsFailureTotal        prometheus.Counter
	bridgeL1RelayerGasPriceOraclerRunTotal prometheus.Counter
	bridgeL1RelayerLastGasPrice            prometheus.Gauge
	bridgeL1MsgsRelayedConfirmedTotal      prometheus.Counter
	bridgeL1GasOraclerConfirmedTotal       prometheus.Counter
}

var (
	// initL1RelayerMetricOnce guards one-time registration of the metrics.
	initL1RelayerMetricOnce sync.Once
	// l1RelayerMetric is the process-wide singleton built on first use.
	l1RelayerMetric *l1RelayerMetrics
)

// initL1RelayerMetrics registers all L1-relayer metrics on reg exactly once
// and returns the shared instance.
// NOTE(review): because of sync.Once, a later call with a *different*
// registerer still returns metrics registered on the first one — confirm all
// callers pass the same registry.
func initL1RelayerMetrics(reg prometheus.Registerer) *l1RelayerMetrics {
	initL1RelayerMetricOnce.Do(func() {
		l1RelayerMetric = &l1RelayerMetrics{
			bridgeL1RelayedMsgsTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer1_msg_relayed_total",
				Help: "The total number of the l1 relayed message.",
			}),
			bridgeL1RelayedMsgsFailureTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer1_msg_relayed_failure_total",
				Help: "The total number of the l1 relayed failure message.",
			}),
			bridgeL1MsgsRelayedConfirmedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer1_relayed_confirmed_total",
				Help: "The total number of layer1 relayed confirmed",
			}),
			bridgeL1RelayerGasPriceOraclerRunTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer1_gas_price_oracler_total",
				Help: "The total number of layer1 gas price oracler run total",
			}),
			bridgeL1RelayerLastGasPrice: promauto.With(reg).NewGauge(prometheus.GaugeOpts{
				Name: "bridge_layer1_gas_price_latest_gas_price",
				Help: "The latest gas price of bridge relayer l1",
			}),
			bridgeL1GasOraclerConfirmedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer1_gas_oracler_confirmed_total",
				// NOTE(review): help text duplicates the relayed-confirmed
				// metric above — confirm intended wording.
				Help: "The total number of layer1 relayed confirmed",
			}),
		}
	})
	return l1RelayerMetric
}

View File

@@ -1,74 +0,0 @@
package relayer

import (
	"sync"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
)

// l2RelayerMetrics bundles all Prometheus metrics emitted by the L2 relayer;
// each field's meaning is given by its Name/Help in initL2RelayerMetrics.
type l2RelayerMetrics struct {
	bridgeL2RelayerProcessPendingBatchTotal                     prometheus.Counter
	bridgeL2RelayerProcessPendingBatchSuccessTotal              prometheus.Counter
	bridgeL2RelayerGasPriceOraclerRunTotal                      prometheus.Counter
	bridgeL2RelayerLastGasPrice                                 prometheus.Gauge
	bridgeL2RelayerProcessCommittedBatchesTotal                 prometheus.Counter
	bridgeL2RelayerProcessCommittedBatchesFinalizedTotal        prometheus.Counter
	bridgeL2RelayerProcessCommittedBatchesFinalizedSuccessTotal prometheus.Counter
	bridgeL2BatchesCommittedConfirmedTotal                      prometheus.Counter
	bridgeL2BatchesFinalizedConfirmedTotal                      prometheus.Counter
	bridgeL2BatchesGasOraclerConfirmedTotal                     prometheus.Counter
}

var (
	// initL2RelayerMetricOnce guards one-time registration of the metrics.
	initL2RelayerMetricOnce sync.Once
	// l2RelayerMetric is the process-wide singleton built on first use.
	l2RelayerMetric *l2RelayerMetrics
)

// initL2RelayerMetrics registers all L2-relayer metrics on reg exactly once
// and returns the shared instance.
// NOTE(review): because of sync.Once, a later call with a *different*
// registerer still returns metrics registered on the first one — confirm all
// callers pass the same registry.
func initL2RelayerMetrics(reg prometheus.Registerer) *l2RelayerMetrics {
	initL2RelayerMetricOnce.Do(func() {
		l2RelayerMetric = &l2RelayerMetrics{
			bridgeL2RelayerProcessPendingBatchTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer2_process_pending_batch_total",
				Help: "The total number of layer2 process pending batch",
			}),
			bridgeL2RelayerProcessPendingBatchSuccessTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer2_process_pending_batch_success_total",
				Help: "The total number of layer2 process pending success batch",
			}),
			bridgeL2RelayerGasPriceOraclerRunTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer2_gas_price_oracler_total",
				Help: "The total number of layer2 gas price oracler run total",
			}),
			bridgeL2RelayerLastGasPrice: promauto.With(reg).NewGauge(prometheus.GaugeOpts{
				Name: "bridge_layer2_gas_price_latest_gas_price",
				Help: "The latest gas price of bridge relayer l2",
			}),
			bridgeL2RelayerProcessCommittedBatchesTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer2_process_committed_batches_total",
				Help: "The total number of layer2 process committed batches run total",
			}),
			bridgeL2RelayerProcessCommittedBatchesFinalizedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer2_process_committed_batches_finalized_total",
				Help: "The total number of layer2 process committed batches finalized total",
			}),
			bridgeL2RelayerProcessCommittedBatchesFinalizedSuccessTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer2_process_committed_batches_finalized_success_total",
				Help: "The total number of layer2 process committed batches finalized success total",
			}),
			bridgeL2BatchesCommittedConfirmedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer2_process_committed_batches_confirmed_total",
				Help: "The total number of layer2 process committed batches confirmed total",
			}),
			bridgeL2BatchesFinalizedConfirmedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				Name: "bridge_layer2_process_finalized_batches_confirmed_total",
				Help: "The total number of layer2 process finalized batches confirmed total",
			}),
			bridgeL2BatchesGasOraclerConfirmedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
				// NOTE(review): "gras_oracler" looks like a typo for
				// "gas_oracler"; renaming would break existing dashboards, so
				// flagging only. The Help text also duplicates the
				// finalized-confirmed metric above — confirm wording.
				Name: "bridge_layer2_process_gras_oracler_confirmed_total",
				Help: "The total number of layer2 process finalized batches confirmed total",
			}),
		}
	})
	return l2RelayerMetric
}

View File

@@ -1,65 +0,0 @@
package watcher
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
"scroll-tech/common/database"
"scroll-tech/common/types"
"scroll-tech/bridge/internal/config"
"scroll-tech/bridge/internal/orm"
)
// TODO: Add unit tests that the limits are enforced correctly.
func testBatchProposer(t *testing.T) {
db := setupDB(t)
defer database.CloseDB(db)
l2BlockOrm := orm.NewL2Block(db)
err := l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2})
assert.NoError(t, err)
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
MaxTxNumPerChunk: 10000,
MaxL1CommitGasPerChunk: 50000000000,
MaxL1CommitCalldataSizePerChunk: 1000000,
MaxRowConsumptionPerChunk: 1048319,
ChunkTimeoutSec: 300,
}, db, nil)
cp.TryProposeChunk()
bp := NewBatchProposer(context.Background(), &config.BatchProposerConfig{
MaxChunkNumPerBatch: 10,
MaxL1CommitGasPerBatch: 50000000000,
MaxL1CommitCalldataSizePerBatch: 1000000,
BatchTimeoutSec: 300,
}, db, nil)
bp.TryProposeBatch()
batchOrm := orm.NewBatch(db)
// get all batches.
batches, err := batchOrm.GetBatches(context.Background(), map[string]interface{}{}, []string{}, 0)
assert.NoError(t, err)
assert.Len(t, batches, 1)
assert.Equal(t, uint64(0), batches[0].StartChunkIndex)
assert.Equal(t, uint64(0), batches[0].EndChunkIndex)
assert.Equal(t, types.RollupPending, types.RollupStatus(batches[0].RollupStatus))
assert.Equal(t, types.ProvingTaskUnassigned, types.ProvingStatus(batches[0].ProvingStatus))
chunkOrm := orm.NewChunk(db)
dbChunks, err := chunkOrm.GetChunksInRange(context.Background(), 0, 0)
assert.NoError(t, err)
assert.Len(t, batches, 1)
assert.Equal(t, batches[0].Hash, dbChunks[0].BatchHash)
assert.Equal(t, types.ProvingTaskUnassigned, types.ProvingStatus(dbChunks[0].ProvingStatus))
blockOrm := orm.NewL2Block(db)
blocks, err := blockOrm.GetL2Blocks(context.Background(), map[string]interface{}{}, []string{}, 0)
assert.NoError(t, err)
assert.Len(t, blocks, 2)
assert.Equal(t, dbChunks[0].Hash, blocks[0].ChunkHash)
assert.Equal(t, dbChunks[0].Hash, blocks[1].ChunkHash)
}

View File

@@ -1,68 +0,0 @@
package watcher

import (
	"context"
	"testing"

	"github.com/stretchr/testify/assert"

	"scroll-tech/common/database"
	"scroll-tech/common/types"

	"scroll-tech/bridge/internal/config"
	"scroll-tech/bridge/internal/orm"
)

// TODO: Add unit tests that the limits are enforced correctly.

// testChunkProposer covers the happy path: with generous limits, one proposal
// round turns the two seeded blocks into a single chunk whose hash matches a
// chunk built by hand from the same blocks.
func testChunkProposer(t *testing.T) {
	db := setupDB(t)
	defer database.CloseDB(db)
	l2BlockOrm := orm.NewL2Block(db)
	err := l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2})
	assert.NoError(t, err)
	cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
		MaxTxNumPerChunk:                10000,
		MaxL1CommitGasPerChunk:          50000000000,
		MaxL1CommitCalldataSizePerChunk: 1000000,
		MaxRowConsumptionPerChunk:       1048319,
		ChunkTimeoutSec:                 300,
	}, db, nil)
	cp.TryProposeChunk()
	// Recompute the expected chunk hash directly from the same two blocks.
	expectedChunk := &types.Chunk{
		Blocks: []*types.WrappedBlock{wrappedBlock1, wrappedBlock2},
	}
	expectedHash, err := expectedChunk.Hash(0)
	assert.NoError(t, err)
	chunkOrm := orm.NewChunk(db)
	chunks, err := chunkOrm.GetChunksGEIndex(context.Background(), 0, 0)
	assert.NoError(t, err)
	assert.Len(t, chunks, 1)
	assert.Equal(t, expectedHash.Hex(), chunks[0].Hash)
}

// testChunkProposerRowConsumption checks that a zero row-consumption limit
// results in no chunk being persisted at all.
func testChunkProposerRowConsumption(t *testing.T) {
	db := setupDB(t)
	defer database.CloseDB(db)
	l2BlockOrm := orm.NewL2Block(db)
	err := l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2})
	assert.NoError(t, err)
	cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
		MaxTxNumPerChunk:                10000,
		MaxL1CommitGasPerChunk:          50000000000,
		MaxL1CommitCalldataSizePerChunk: 1000000,
		MaxRowConsumptionPerChunk:       0, // ! — limit chosen to reject every block
		ChunkTimeoutSec:                 300,
	}, db, nil)
	cp.TryProposeChunk()
	chunkOrm := orm.NewChunk(db)
	chunks, err := chunkOrm.GetChunksGEIndex(context.Background(), 0, 0)
	assert.NoError(t, err)
	assert.Len(t, chunks, 0)
}

View File

@@ -1,83 +0,0 @@
package tests

import (
	"context"
	"math/big"
	"testing"

	"github.com/scroll-tech/go-ethereum/accounts/abi/bind"
	"github.com/scroll-tech/go-ethereum/common"
	geth_types "github.com/scroll-tech/go-ethereum/core/types"
	"github.com/scroll-tech/go-ethereum/rpc"
	"github.com/stretchr/testify/assert"

	"scroll-tech/common/database"
	"scroll-tech/common/types"

	"scroll-tech/bridge/internal/controller/relayer"
	"scroll-tech/bridge/internal/controller/watcher"
	"scroll-tech/bridge/internal/orm"
)

// testRelayL1MessageSucceed drives a full L1->L2 message round trip against
// the docker test chains: send via the L1 messenger, observe it with the L1
// watcher (Pending), relay it (Submitted), then confirm the relay receipt on
// L2 via the L2 watcher (Confirmed).
func testRelayL1MessageSucceed(t *testing.T) {
	db := setupDB(t)
	defer database.CloseDB(db)
	prepareContracts(t)

	l1Cfg := bridgeApp.Config.L1Config
	l2Cfg := bridgeApp.Config.L2Config

	// Create L1Relayer
	l1Relayer, err := relayer.NewLayer1Relayer(context.Background(), db, l1Cfg.RelayerConfig, nil)
	assert.NoError(t, err)
	// Create L1Watcher
	confirmations := rpc.LatestBlockNumber
	l1Watcher := watcher.NewL1WatcherClient(context.Background(), l1Client, 0, confirmations, l1Cfg.L1MessengerAddress,
		l1Cfg.L1MessageQueueAddress, l1Cfg.ScrollChainContractAddress, db, nil)

	// Create L2Watcher
	l2Watcher := watcher.NewL2WatcherClient(context.Background(), l2Client, confirmations, l2Cfg.L2MessengerAddress,
		l2Cfg.L2MessageQueueAddress, l2Cfg.WithdrawTrieRootSlot, db, nil)

	// send message through l1 messenger contract: zero value, fixed payload,
	// zero gas limit; the current nonce identifies the message queue index.
	nonce, err := l1MessengerInstance.MessageNonce(&bind.CallOpts{})
	assert.NoError(t, err)
	sendTx, err := l1MessengerInstance.SendMessage(l1Auth, l2Auth.From, big.NewInt(0), common.Hex2Bytes("00112233"), big.NewInt(0))
	assert.NoError(t, err)
	sendReceipt, err := bind.WaitMined(context.Background(), l1Client, sendTx)
	assert.NoError(t, err)
	// Belt-and-braces: assert.NoError does not stop the test, so bail out hard
	// before dereferencing a failed receipt.
	if sendReceipt.Status != geth_types.ReceiptStatusSuccessful || err != nil {
		t.Fatalf("Call failed")
	}

	// l1 watch process events
	l1Watcher.FetchContractEvent()

	l1MessageOrm := orm.NewL1Message(db)
	// check db status: the watcher should have stored the message as Pending.
	msg, err := l1MessageOrm.GetL1MessageByQueueIndex(nonce.Uint64())
	assert.NoError(t, err)
	assert.Equal(t, types.MsgStatus(msg.Status), types.MsgPending)
	assert.Equal(t, msg.Target, l2Auth.From.String())

	// process l1 messages: relaying records the L2 tx hash and moves the
	// message to Submitted.
	l1Relayer.ProcessSavedEvents()

	l1Message, err := l1MessageOrm.GetL1MessageByQueueIndex(nonce.Uint64())
	assert.NoError(t, err)
	assert.NotEmpty(t, l1Message.Layer2Hash)
	assert.Equal(t, types.MsgStatus(l1Message.Status), types.MsgSubmitted)

	relayTx, _, err := l2Client.TransactionByHash(context.Background(), common.HexToHash(l1Message.Layer2Hash))
	assert.NoError(t, err)
	relayTxReceipt, err := bind.WaitMined(context.Background(), l2Client, relayTx)
	assert.NoError(t, err)
	assert.Equal(t, len(relayTxReceipt.Logs), 1)

	// fetch message relayed events: seeing the relay event on L2 moves the
	// message to Confirmed.
	l2Watcher.FetchContractEvent()
	msg, err = l1MessageOrm.GetL1MessageByQueueIndex(nonce.Uint64())
	assert.NoError(t, err)
	assert.Equal(t, types.MsgStatus(msg.Status), types.MsgConfirmed)
}

View File

@@ -20,7 +20,7 @@ RUN find ./ | grep libzktrie.so | xargs -I{} cp {} /app/target/release/
FROM scrolltech/go-rust-builder:go-1.19-rust-nightly-2022-12-10 as base
WORKDIR /src
COPY go.work* ./
COPY ./bridge/go.* ./bridge/
COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/

View File

@@ -3,7 +3,7 @@ FROM scrolltech/go-alpine-builder:1.19 as base
WORKDIR /src
COPY go.work* ./
COPY ./bridge/go.* ./bridge/
COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/

View File

@@ -3,7 +3,7 @@ FROM scrolltech/go-alpine-builder:1.19 as base
WORKDIR /src
COPY go.work* ./
COPY ./bridge/go.* ./bridge/
COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/
@@ -18,7 +18,7 @@ FROM base as builder
RUN --mount=target=. \
--mount=type=cache,target=/root/.cache/go-build \
cd /src/bridge/cmd/event_watcher/ && go build -v -p 4 -o /bin/event_watcher
cd /src/rollup/cmd/event_watcher/ && go build -v -p 4 -o /bin/event_watcher
# Pull event_watcher into a second stage deploy alpine container
FROM alpine:latest

View File

@@ -3,7 +3,7 @@ FROM scrolltech/go-alpine-builder:1.19 as base
WORKDIR /src
COPY go.work* ./
COPY ./bridge/go.* ./bridge/
COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/
@@ -18,7 +18,7 @@ FROM base as builder
RUN --mount=target=. \
--mount=type=cache,target=/root/.cache/go-build \
cd /src/bridge/cmd/gas_oracle/ && go build -v -p 4 -o /bin/gas_oracle
cd /src/rollup/cmd/gas_oracle/ && go build -v -p 4 -o /bin/gas_oracle
# Pull gas_oracle into a second stage deploy alpine container
FROM alpine:latest

View File

@@ -1,28 +0,0 @@
# Download Go dependencies
FROM scrolltech/go-alpine-builder:1.19 as base
WORKDIR /src
# Copy only module files first so the `go mod download` layer stays cached
# until a go.mod/go.sum actually changes.
COPY go.work* ./
COPY ./bridge/go.* ./bridge/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/
COPY ./prover-stats-api/go.* ./prover-stats-api/
COPY ./prover/go.* ./prover/
COPY ./tests/integration-test/go.* ./tests/integration-test/
COPY ./bridge-history-api/go.* ./bridge-history-api/
RUN go mod download -x

# Build msg_relayer
FROM base as builder
# Bind-mount the source tree and reuse the Go build cache across builds.
RUN --mount=target=. \
    --mount=type=cache,target=/root/.cache/go-build \
    cd /src/bridge/cmd/msg_relayer/ && go build -v -p 4 -o /bin/msg_relayer

# Pull msg_relayer into a second stage deploy alpine container
FROM alpine:latest

COPY --from=builder /bin/msg_relayer /bin/
ENTRYPOINT ["msg_relayer"]

View File

@@ -1,5 +0,0 @@
assets/
docs/
l2geth/
rpc-gateway/
*target/*

View File

@@ -3,7 +3,7 @@ FROM scrolltech/go-alpine-builder:1.19 as base
WORKDIR /src
COPY go.work* ./
COPY ./bridge/go.* ./bridge/
COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/

View File

@@ -3,7 +3,7 @@ FROM scrolltech/go-alpine-builder:1.19 as base
WORKDIR /src
COPY go.work* ./
COPY ./bridge/go.* ./bridge/
COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/
@@ -18,7 +18,7 @@ FROM base as builder
RUN --mount=target=. \
--mount=type=cache,target=/root/.cache/go-build \
cd /src/bridge/cmd/rollup_relayer/ && go build -v -p 4 -o /bin/rollup_relayer
cd /src/rollup/cmd/rollup_relayer/ && go build -v -p 4 -o /bin/rollup_relayer
# Pull rollup_relayer into a second stage deploy alpine container
FROM alpine:latest

View File

@@ -3,7 +3,7 @@ set -uex
profile_name=$1
exclude_dirs=("scroll-tech/bridge/cmd" "scroll-tech/bridge/tests" "scroll-tech/bridge/mock_bridge" "scroll-tech/coordinator/cmd" "scroll-tech/coordinator/internal/logic/verifier")
exclude_dirs=("scroll-tech/rollup/cmd" "scroll-tech/rollup/tests" "scroll-tech/rollup/mock_bridge" "scroll-tech/coordinator/cmd" "scroll-tech/coordinator/internal/logic/verifier")
all_packages=$(go list ./... | grep -v "^scroll-tech/${profile_name}$")
coverpkg="scroll-tech/${profile_name}"

View File

@@ -6,7 +6,7 @@ flag_management:
default_rules:
carryforward: true
individual_flags:
- name: bridge
- name: rollup
statuses:
- type: project
target: auto

View File

@@ -5,8 +5,9 @@ import (
"os"
"os/exec"
"strings"
"sync"
"sync/atomic"
"github.com/docker/docker/pkg/reexec"
cmap "github.com/orcaman/concurrent-map"
)
@@ -26,8 +27,9 @@ type Cmd struct {
name string
args []string
mu sync.Mutex
cmd *exec.Cmd
isRunning uint64
cmd *exec.Cmd
app *exec.Cmd
checkFuncs cmap.ConcurrentMap //map[string]checkFunc
@@ -38,13 +40,23 @@ type Cmd struct {
}
// NewCmd create Cmd instance.
func NewCmd(name string, args ...string) *Cmd {
return &Cmd{
func NewCmd(name string, params ...string) *Cmd {
cmd := &Cmd{
checkFuncs: cmap.New(),
name: name,
args: args,
args: params,
ErrChan: make(chan error, 10),
cmd: exec.Command(name, params...),
app: &exec.Cmd{
Path: reexec.Self(),
Args: append([]string{name}, params...),
},
}
cmd.cmd.Stdout = cmd
cmd.cmd.Stderr = cmd
cmd.app.Stdout = cmd
cmd.app.Stderr = cmd
return cmd
}
// RegistFunc register check func
@@ -58,15 +70,14 @@ func (c *Cmd) UnRegistFunc(key string) {
}
func (c *Cmd) runCmd() {
cmd := exec.Command(c.args[0], c.args[1:]...) //nolint:gosec
cmd.Stdout = c
cmd.Stderr = c
c.ErrChan <- cmd.Run()
fmt.Println("cmd:", append([]string{c.name}, c.args...))
if atomic.CompareAndSwapUint64(&c.isRunning, 0, 1) {
c.ErrChan <- c.cmd.Run()
}
}
// RunCmd parallel running when parallel is true.
func (c *Cmd) RunCmd(parallel bool) {
fmt.Println("cmd:", c.args)
if parallel {
go c.runCmd()
} else {

View File

@@ -3,41 +3,45 @@ package cmd
import (
"fmt"
"os"
"os/exec"
"strings"
"sync/atomic"
"syscall"
"testing"
"time"
"github.com/docker/docker/pkg/reexec"
"github.com/stretchr/testify/assert"
)
// IsRunning 1 started, 0 not started.
func (c *Cmd) IsRunning() bool {
return atomic.LoadUint64(&c.isRunning) == 1
}
func (c *Cmd) runApp() {
fmt.Println("cmd:", append([]string{c.name}, c.args...))
if atomic.CompareAndSwapUint64(&c.isRunning, 0, 1) {
c.ErrChan <- c.app.Run()
}
}
// RunApp exec's the current binary using name as argv[0] which will trigger the
// reexec init function for that name (e.g. "geth-test" in cmd/geth/run_test.go)
func (c *Cmd) RunApp(waitResult func() bool) {
fmt.Println("cmd: ", append([]string{c.name}, c.args...))
cmd := &exec.Cmd{
Path: reexec.Self(),
Args: append([]string{c.name}, c.args...),
Stderr: c,
Stdout: c,
}
if waitResult != nil {
go func() {
_ = cmd.Run()
c.runApp()
}()
waitResult()
} else {
_ = cmd.Run()
c.runApp()
}
c.mu.Lock()
c.cmd = cmd
c.mu.Unlock()
}
// WaitExit wait util process exit.
func (c *Cmd) WaitExit() {
if atomic.LoadUint64(&c.isRunning) == 0 {
return
}
// Wait all the check functions are finished, interrupt loop when appear error.
var err error
for err == nil && !c.checkFuncs.IsEmpty() {
@@ -52,20 +56,18 @@ func (c *Cmd) WaitExit() {
}
// Send interrupt signal.
c.mu.Lock()
_ = c.cmd.Process.Signal(os.Interrupt)
// should use `_ = c.cmd.Process.Wait()` here, but we have some bugs in coordinator's graceful exit,
_ = c.app.Process.Signal(os.Interrupt)
// should use `_ = c.app.Process.Wait()` here, but we have some bugs in coordinator's graceful exit,
// so we use `Kill` as a temp workaround. And since `WaitExit` is only used in integration tests, so
// it won't really affect our functionalities.
_ = c.cmd.Process.Kill()
c.mu.Unlock()
if err = c.app.Process.Signal(syscall.SIGTERM); err != nil {
_ = c.app.Process.Kill()
}
}
// Interrupt send interrupt signal.
func (c *Cmd) Interrupt() {
c.mu.Lock()
c.ErrChan <- c.cmd.Process.Signal(os.Interrupt)
c.mu.Unlock()
c.ErrChan <- c.app.Process.Signal(os.Interrupt)
}
// WaitResult return true when get the keyword during timeout.

View File

@@ -12,7 +12,7 @@ import (
)
func TestCmd(t *testing.T) {
app := cmd.NewCmd("curTime", "date", "+%Y-%m-%d")
app := cmd.NewCmd("date", "+%Y-%m-%d")
tm := time.Now()
curTime := fmt.Sprintf("%d-%02d-%02d", tm.Year(), tm.Month(), tm.Day())

View File

@@ -2,6 +2,7 @@ package database
import (
"context"
"database/sql"
"fmt"
"time"
@@ -63,17 +64,15 @@ func InitDB(config *Config) (*gorm.DB, error) {
if err != nil {
return nil, err
}
sqlDB, err := db.DB()
if err != nil {
return nil, err
sqlDB, pingErr := Ping(db)
if pingErr != nil {
return nil, pingErr
}
sqlDB.SetMaxOpenConns(config.MaxOpenNum)
sqlDB.SetMaxIdleConns(config.MaxIdleNum)
if err = sqlDB.Ping(); err != nil {
return nil, err
}
return db, nil
}
@@ -88,3 +87,16 @@ func CloseDB(db *gorm.DB) error {
}
return nil
}
// Ping verifies that the database connection is alive and returns the
// underlying *sql.DB handle. It fails if the generic database object cannot
// be obtained from gorm or if the ping round-trip reports an error.
func Ping(db *gorm.DB) (*sql.DB, error) {
	sqlDB, dbErr := db.DB()
	if dbErr != nil {
		return nil, dbErr
	}
	if pingErr := sqlDB.Ping(); pingErr != nil {
		return nil, pingErr
	}
	return sqlDB, nil
}

View File

@@ -11,6 +11,10 @@ import (
"github.com/mattn/go-colorable"
"github.com/mattn/go-isatty"
"github.com/scroll-tech/go-ethereum/log"
"github.com/stretchr/testify/assert"
"scroll-tech/common/docker"
"scroll-tech/common/version"
)
func TestGormLogger(t *testing.T) {
@@ -33,3 +37,26 @@ func TestGormLogger(t *testing.T) {
gl.Info(context.Background(), "test %s warn:%v", "testInfo", errors.New("test info"))
gl.Trace(context.Background(), time.Now(), func() (string, int64) { return "test trace", 1 }, nil)
}
// TestDB spins up a Postgres docker image, initializes a gorm DB through
// InitDB, verifies connectivity with Ping, and finally closes the connection.
func TestDB(t *testing.T) {
	version.Version = "v4.1.98-aaa-bbb-ccc"
	app := docker.NewDockerApp()
	app.RunDBImage(t)

	cfg := &Config{
		DSN:        app.DBConfig.DSN,
		DriverName: app.DBConfig.DriverName,
		MaxOpenNum: app.DBConfig.MaxOpenNum,
		MaxIdleNum: app.DBConfig.MaxIdleNum,
	}

	db, err := InitDB(cfg)
	assert.NoError(t, err)

	sqlDB, err := Ping(db)
	assert.NoError(t, err)
	assert.NotNil(t, sqlDB)

	assert.NoError(t, CloseDB(db))
}

View File

@@ -27,6 +27,7 @@ var (
// AppAPI app interface.
type AppAPI interface {
IsRunning() bool
WaitResult(t *testing.T, timeout time.Duration, keyword string) bool
RunApp(waitResult func() bool)
WaitExit()

View File

@@ -36,7 +36,7 @@ func NewImgDB(image, password, dbName string, port int) ImgInstance {
dbName: dbName,
port: port,
}
img.cmd = cmd.NewCmd(img.name, img.prepare()...)
img.cmd = cmd.NewCmd("docker", img.prepare()...)
return img
}
@@ -89,7 +89,7 @@ func (i *ImgDB) IsRunning() bool {
}
func (i *ImgDB) prepare() []string {
cmd := []string{"docker", "run", "--rm", "--name", i.name, "-p", fmt.Sprintf("%d:5432", i.port)}
cmd := []string{"run", "--rm", "--name", i.name, "-p", fmt.Sprintf("%d:5432", i.port)}
envs := []string{
"-e", "POSTGRES_PASSWORD=" + i.password,
"-e", fmt.Sprintf("POSTGRES_DB=%s", i.dbName),

View File

@@ -42,7 +42,7 @@ func NewImgGeth(image, volume, ipc string, hPort, wPort int) GethImgInstance {
httpPort: hPort,
wsPort: wPort,
}
img.cmd = cmd.NewCmd(img.name, img.prepare()...)
img.cmd = cmd.NewCmd("docker", img.params()...)
return img
}
@@ -149,8 +149,8 @@ func (i *ImgGeth) Stop() error {
return cli.ContainerRemove(ctx, i.id, types.ContainerRemoveOptions{})
}
func (i *ImgGeth) prepare() []string {
cmds := []string{"docker", "run", "--rm", "--name", i.name}
func (i *ImgGeth) params() []string {
cmds := []string{"run", "--rm", "--name", i.name}
var ports []string
if i.httpPort != 0 {
ports = append(ports, []string{"-p", strconv.Itoa(i.httpPort) + ":8545"}...)

View File

@@ -1,4 +1,4 @@
FROM scrolltech/l2geth:scroll-v4.3.34
FROM scrolltech/l2geth:scroll-v4.3.55
RUN mkdir -p /l2geth/keystore

View File

@@ -14,7 +14,7 @@ require (
github.com/modern-go/reflect2 v1.0.2
github.com/orcaman/concurrent-map v1.0.0
github.com/prometheus/client_golang v1.14.0
github.com/scroll-tech/go-ethereum v1.10.14-0.20230812030736-25fe3ba69a28
github.com/scroll-tech/go-ethereum v1.10.14-0.20230829000527-f883dcdc21fc
github.com/stretchr/testify v1.8.3
github.com/urfave/cli/v2 v2.25.7
gorm.io/driver/postgres v1.5.0
@@ -117,7 +117,7 @@ require (
golang.org/x/arch v0.4.0 // indirect
golang.org/x/crypto v0.12.0 // indirect
golang.org/x/mod v0.12.0 // indirect
golang.org/x/net v0.12.0 // indirect
golang.org/x/net v0.14.0 // indirect
golang.org/x/sync v0.3.0 // indirect
golang.org/x/sys v0.11.0 // indirect
golang.org/x/text v0.12.0 // indirect

View File

@@ -434,8 +434,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/go-ethereum v1.10.14-0.20230812030736-25fe3ba69a28 h1:CECBTWhZ5NGAn8lGFB4ooRAYxZns8PXoX8kTR/14c04=
github.com/scroll-tech/go-ethereum v1.10.14-0.20230812030736-25fe3ba69a28/go.mod h1:DiN3p2inoXOxGffxSswDKqWjQ7bU+Mp0c9v0XQXKmaA=
github.com/scroll-tech/go-ethereum v1.10.14-0.20230829000527-f883dcdc21fc h1:eK3NOpjgm/b2TQ6rYqWx92Zri0kBuxf6gKjjsVxWKr8=
github.com/scroll-tech/go-ethereum v1.10.14-0.20230829000527-f883dcdc21fc/go.mod h1:DiN3p2inoXOxGffxSswDKqWjQ7bU+Mp0c9v0XQXKmaA=
github.com/scroll-tech/zktrie v0.6.0 h1:xLrMAO31Yo2BiPg1jtYKzcjpEFnXy8acbB7iIsyshPs=
github.com/scroll-tech/zktrie v0.6.0/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk=
github.com/segmentio/kafka-go v0.1.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
@@ -576,8 +576,8 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50=
golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14=
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=

View File

@@ -32,7 +32,7 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.7.5#aa9a9aff698a5b253d1f3c29ea3d3006364777bf"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.8.1#fd703cc269a07f28b1febc7ce021792e2733564b"
dependencies = [
"ark-std",
"env_logger 0.10.0",
@@ -380,7 +380,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b"
dependencies = [
"borsh-derive",
"hashbrown 0.12.3",
"hashbrown 0.13.2",
]
[[package]]
@@ -433,7 +433,7 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.7.5#aa9a9aff698a5b253d1f3c29ea3d3006364777bf"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.8.1#fd703cc269a07f28b1febc7ce021792e2733564b"
dependencies = [
"eth-types",
"ethers-core",
@@ -447,6 +447,7 @@ dependencies = [
"lazy_static",
"log",
"mock",
"mpt-zktrie",
"once_cell",
"poseidon-circuit",
"rand",
@@ -1049,7 +1050,7 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.7.5#aa9a9aff698a5b253d1f3c29ea3d3006364777bf"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.8.1#fd703cc269a07f28b1febc7ce021792e2733564b"
dependencies = [
"ethers-core",
"ethers-signers",
@@ -1226,7 +1227,7 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.7.5#aa9a9aff698a5b253d1f3c29ea3d3006364777bf"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.8.1#fd703cc269a07f28b1febc7ce021792e2733564b"
dependencies = [
"eth-types",
"geth-utils",
@@ -1439,7 +1440,7 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.7.5#aa9a9aff698a5b253d1f3c29ea3d3006364777bf"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.8.1#fd703cc269a07f28b1febc7ce021792e2733564b"
dependencies = [
"digest 0.7.6",
"eth-types",
@@ -1479,7 +1480,7 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.7.5#aa9a9aff698a5b253d1f3c29ea3d3006364777bf"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.8.1#fd703cc269a07f28b1febc7ce021792e2733564b"
dependencies = [
"env_logger 0.9.3",
"gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)",
@@ -1583,7 +1584,7 @@ dependencies = [
[[package]]
name = "halo2-base"
version = "0.2.2"
source = "git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0#2c225864227e74b207d9f4b9e08c4d5f1afc69a1"
source = "git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.3#33b3b4d240ba8b7e0fbdca241c677ba84ab16db5"
dependencies = [
"ff",
"halo2_proofs",
@@ -1598,7 +1599,7 @@ dependencies = [
[[package]]
name = "halo2-ecc"
version = "0.2.2"
source = "git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0#2c225864227e74b207d9f4b9e08c4d5f1afc69a1"
source = "git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.3#33b3b4d240ba8b7e0fbdca241c677ba84ab16db5"
dependencies = [
"ff",
"group",
@@ -1633,7 +1634,7 @@ dependencies = [
[[package]]
name = "halo2-mpt-circuits"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/mpt-circuit.git?tag=v0.5.1#2163a9c436ed85363c954ecf7e6e1044a1b991dc"
source = "git+https://github.com/scroll-tech/mpt-circuit.git?tag=v0.6.2#cafcdeb2c7fd6602d0ddac183c1fb5396a135f9e"
dependencies = [
"ethers-core",
"halo2_proofs",
@@ -1655,7 +1656,7 @@ dependencies = [
[[package]]
name = "halo2_proofs"
version = "0.2.0"
source = "git+https://github.com/scroll-tech/halo2.git?branch=develop#19de67c07a9b9b567580466763f93ebfbc3bb799"
source = "git+https://github.com/scroll-tech/halo2.git?branch=develop#aa86c107aeb62282d81ebce5c4930ec0c0aa540b"
dependencies = [
"ark-std",
"blake2b_simd",
@@ -1769,9 +1770,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "hex-literal"
version = "0.4.1"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46"
checksum = "7ebdb29d2ea9ed0083cd8cece49bbd968021bd99b0849edb4a9a7ee0fdf6a4e0"
[[package]]
name = "hmac"
@@ -2077,7 +2078,7 @@ dependencies = [
[[package]]
name = "keccak256"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.7.5#aa9a9aff698a5b253d1f3c29ea3d3006364777bf"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.8.1#fd703cc269a07f28b1febc7ce021792e2733564b"
dependencies = [
"env_logger 0.9.3",
"eth-types",
@@ -2261,10 +2262,27 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "misc-precompiled-circuit"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/misc-precompiled-circuit.git?tag=v0.1.0#f647341f9951f5c2399035728d4f6765564e2e02"
dependencies = [
"halo2-gate-generator",
"halo2_proofs",
"lazy_static",
"num-bigint",
"rand",
"serde",
"serde_json",
"strum",
"strum_macros",
"subtle",
]
[[package]]
name = "mock"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.7.5#aa9a9aff698a5b253d1f3c29ea3d3006364777bf"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.8.1#fd703cc269a07f28b1febc7ce021792e2733564b"
dependencies = [
"eth-types",
"ethers-core",
@@ -2272,6 +2290,7 @@ dependencies = [
"external-tracer",
"itertools",
"lazy_static",
"log",
"rand",
"rand_chacha",
]
@@ -2279,9 +2298,8 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.7.5#aa9a9aff698a5b253d1f3c29ea3d3006364777bf"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.8.1#fd703cc269a07f28b1febc7ce021792e2733564b"
dependencies = [
"bus-mapping",
"eth-types",
"halo2-mpt-circuits",
"halo2_proofs",
@@ -2637,7 +2655,7 @@ dependencies = [
[[package]]
name = "poseidon-circuit"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/poseidon-circuit.git?branch=scroll-dev-0723#1652d54bf7ca9d8f286b53fe077d9efefdcf6d5f"
source = "git+https://github.com/scroll-tech/poseidon-circuit.git?branch=scroll-dev-0901#69524f42bdc55c581088c2fe64c2ab9a2921146b"
dependencies = [
"bitvec 1.0.1",
"halo2_proofs",
@@ -2754,11 +2772,12 @@ dependencies = [
[[package]]
name = "prover"
version = "0.7.5"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.7.5#9699d40940aed2f14d8e1958167d714bca2c9984"
version = "0.8.1"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.8.1#5b94df914877aa7f20f7c37fabf80d73f1c8cf2c"
dependencies = [
"aggregator",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping",
"chrono",
@@ -2789,7 +2808,6 @@ dependencies = [
"snark-verifier-sdk",
"strum",
"strum_macros",
"types",
"zkevm-circuits",
]
@@ -3019,8 +3037,7 @@ dependencies = [
[[package]]
name = "revm-precompile"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66837781605c6dcb7f07ad87604eeab3119dae9149d69d8839073dd6f188673d"
source = "git+https://github.com/scroll-tech/revm?branch=scroll-fix#aebf2e591e622e6bcce2c5d4bf3336935a68cf11"
dependencies = [
"k256",
"num",
@@ -3035,12 +3052,10 @@ dependencies = [
[[package]]
name = "revm-primitives"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "304d998f466ffef72d76c7f20b05bf08a96801736a6fb1fdef47d49a292618df"
version = "1.0.0"
source = "git+https://github.com/scroll-tech/revm?branch=scroll-fix#aebf2e591e622e6bcce2c5d4bf3336935a68cf11"
dependencies = [
"auto_impl",
"bitvec 1.0.1",
"bytes",
"derive_more",
"enumn",
@@ -3048,7 +3063,6 @@ dependencies = [
"hashbrown 0.13.2",
"hex",
"hex-literal",
"primitive-types 0.12.1",
"rlp",
"ruint",
"sha3 0.10.8",
@@ -3156,23 +3170,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "rmd160-circuits"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/misc-precompiled-circuit.git?branch=integration#31c41ca4365dcf2b6ed4f2cdcd3dc8d2e8f080df"
dependencies = [
"halo2-gate-generator",
"halo2_proofs",
"lazy_static",
"num-bigint",
"rand",
"serde",
"serde_json",
"strum",
"strum_macros",
"subtle",
]
[[package]]
name = "ruint"
version = "1.9.0"
@@ -3454,17 +3451,6 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_repr"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.27",
]
[[package]]
name = "serde_stacker"
version = "0.1.10"
@@ -3624,7 +3610,7 @@ checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
[[package]]
name = "snark-verifier"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/snark-verifier?tag=v0.1.2#4466059ce9a6dfaf26455e4ffb61d72af775cf52"
source = "git+https://github.com/scroll-tech/snark-verifier?tag=v0.1.4#c60146b786ab14ba60636eef0a7e427c3eb0baee"
dependencies = [
"bytes",
"ethereum-types 0.14.1",
@@ -3648,7 +3634,7 @@ dependencies = [
[[package]]
name = "snark-verifier-sdk"
version = "0.0.1"
source = "git+https://github.com/scroll-tech/snark-verifier?tag=v0.1.2#4466059ce9a6dfaf26455e4ffb61d72af775cf52"
source = "git+https://github.com/scroll-tech/snark-verifier?tag=v0.1.4#c60146b786ab14ba60636eef0a7e427c3eb0baee"
dependencies = [
"bincode",
"env_logger 0.10.0",
@@ -4037,21 +4023,6 @@ version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "types"
version = "0.7.5"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.7.5#9699d40940aed2f14d8e1958167d714bca2c9984"
dependencies = [
"base64 0.13.1",
"blake2",
"eth-types",
"ethers-core",
"serde",
"serde_derive",
"serde_json",
"serde_repr",
]
[[package]]
name = "uint"
version = "0.9.5"
@@ -4491,7 +4462,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
[[package]]
name = "zkevm-circuits"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.7.5#aa9a9aff698a5b253d1f3c29ea3d3006364777bf"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.8.1#fd703cc269a07f28b1febc7ce021792e2733564b"
dependencies = [
"array-init",
"bus-mapping",
@@ -4511,6 +4482,7 @@ dependencies = [
"libsecp256k1",
"log",
"maingate",
"misc-precompiled-circuit",
"mock",
"mpt-zktrie",
"num",
@@ -4521,7 +4493,6 @@ dependencies = [
"rand_chacha",
"rand_xorshift",
"rayon",
"rmd160-circuits",
"serde",
"serde_json",
"sha3 0.10.8",
@@ -4546,7 +4517,6 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
"types",
]
[[package]]

View File

@@ -20,8 +20,7 @@ maingate = { git = "https://github.com/scroll-tech/halo2wrong", branch = "halo2-
halo2curves = { git = "https://github.com/scroll-tech/halo2curves.git", branch = "0.3.1-derive-serde" }
[dependencies]
prover = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.7.5" }
types = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.7.5" }
prover = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.8.1" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }
base64 = "0.13.0"

View File

@@ -0,0 +1,8 @@
.PHONY: help fmt clippy test test-ci test-all
fmt:
@cargo fmt --all -- --check
clippy:
@cargo check --all-features
@cargo clippy --release -- -D warnings

View File

@@ -1,15 +1,18 @@
use crate::{
types::{CheckChunkProofsResponse, ProofResult},
utils::{c_char_to_str, c_char_to_vec, string_to_c_char, vec_to_c_char, OUTPUT_DIR},
utils::{
c_char_to_str, c_char_to_vec, file_exists, string_to_c_char, vec_to_c_char, OUTPUT_DIR,
},
};
use libc::c_char;
use prover::{
aggregator::{Prover, Verifier},
consts::AGG_VK_FILENAME,
types::eth::BlockTrace,
utils::{chunk_trace_to_witness_block, init_env_and_log},
BatchProof, ChunkHash, ChunkProof,
};
use std::{cell::OnceCell, env, panic, ptr::null};
use types::eth::BlockTrace;
static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER: OnceCell<Verifier> = OnceCell::new();
@@ -24,6 +27,12 @@ pub unsafe extern "C" fn init_batch_prover(params_dir: *const c_char, assets_dir
// TODO: add a settings in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
// The VK file must exist here; inside the prover itself it is optional and only logged as a warning when missing.
if !file_exists(assets_dir, &AGG_VK_FILENAME) {
panic!("{} must exist in folder {}", *AGG_VK_FILENAME, assets_dir);
}
let prover = Prover::from_dirs(params_dir, assets_dir);
PROVER.set(prover).unwrap();

View File

@@ -1,15 +1,18 @@
use crate::{
types::ProofResult,
utils::{c_char_to_str, c_char_to_vec, string_to_c_char, vec_to_c_char, OUTPUT_DIR},
utils::{
c_char_to_str, c_char_to_vec, file_exists, string_to_c_char, vec_to_c_char, OUTPUT_DIR,
},
};
use libc::c_char;
use prover::{
consts::CHUNK_VK_FILENAME,
types::eth::BlockTrace,
utils::init_env_and_log,
zkevm::{Prover, Verifier},
ChunkProof,
};
use std::{cell::OnceCell, env, panic, ptr::null};
use types::eth::BlockTrace;
static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER: OnceCell<Verifier> = OnceCell::new();
@@ -24,7 +27,13 @@ pub unsafe extern "C" fn init_chunk_prover(params_dir: *const c_char, assets_dir
// TODO: add a settings in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let prover = Prover::from_params_dir(params_dir);
// The VK file must exist here; inside the prover itself it is optional and only logged as a warning when missing.
if !file_exists(assets_dir, &CHUNK_VK_FILENAME) {
panic!("{} must exist in folder {}", *CHUNK_VK_FILENAME, assets_dir);
}
let prover = Prover::from_dirs(params_dir, assets_dir);
PROVER.set(prover).unwrap();
}

View File

@@ -3,6 +3,7 @@ use std::{
env,
ffi::{CStr, CString},
os::raw::c_char,
path::PathBuf,
};
// Only used for debugging.
@@ -26,3 +27,10 @@ pub(crate) fn string_to_c_char(string: String) -> *const c_char {
pub(crate) fn vec_to_c_char(bytes: Vec<u8>) -> *const c_char {
CString::new(bytes).unwrap().into_raw()
}
/// Returns `true` when `filename` exists inside the directory `dir`.
pub(crate) fn file_exists(dir: &str, filename: &str) -> bool {
    // Build `dir/filename` and probe the filesystem for it.
    PathBuf::from(dir).join(filename).exists()
}

View File

@@ -1,10 +1,10 @@
package metrics
package observability
import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"scroll-tech/common/metrics/ginmetrics"
"scroll-tech/common/observability/ginmetrics"
)
// Use register the gin metric

View File

@@ -0,0 +1,35 @@
package observability
import (
"github.com/gin-gonic/gin"
"gorm.io/gorm"
"scroll-tech/common/database"
"scroll-tech/common/types"
)
// ProbesController serves Kubernetes-style liveness and readiness probe endpoints.
type ProbesController struct {
	db *gorm.DB
}

// NewProbesController constructs a ProbesController backed by the given DB handle.
func NewProbesController(db *gorm.DB) *ProbesController {
	return &ProbesController{db: db}
}

// HealthCheck handles the health probe: it pings the database and renders a
// fatal response when the connection is unhealthy, success otherwise.
func (a *ProbesController) HealthCheck(c *gin.Context) {
	_, err := database.Ping(a.db)
	if err != nil {
		types.RenderFatal(c, err)
		return
	}
	types.RenderSuccess(c, nil)
}

// Ready handles the readiness probe; it unconditionally reports success.
func (a *ProbesController) Ready(c *gin.Context) {
	types.RenderSuccess(c, nil)
}

View File

@@ -1,4 +1,4 @@
package metrics
package observability
import (
"errors"
@@ -11,17 +11,17 @@ import (
"github.com/gin-contrib/pprof"
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/scroll-tech/go-ethereum/log"
"github.com/urfave/cli/v2"
"gorm.io/gorm"
"scroll-tech/common/utils"
)
// Server starts the metrics server on the given address, will be closed when the given
// context is canceled.
func Server(c *cli.Context, reg *prometheus.Registry) {
func Server(c *cli.Context, db *gorm.DB) {
if !c.Bool(utils.MetricsEnabled.Name) {
return
}
@@ -33,6 +33,10 @@ func Server(c *cli.Context, reg *prometheus.Registry) {
promhttp.Handler().ServeHTTP(context.Writer, context.Request)
})
probeController := NewProbesController(db)
r.GET("/health", probeController.HealthCheck)
r.GET("/ready", probeController.Ready)
address := fmt.Sprintf(":%s", c.String(utils.MetricsPort.Name))
server := &http.Server{
Addr: address,

View File

@@ -103,6 +103,12 @@ const (
ProverTaskFailureTypeUndefined ProverTaskFailureType = iota
// ProverTaskFailureTypeTimeout prover task failure of timeout
ProverTaskFailureTypeTimeout
// ProverTaskFailureTypeSubmitStatusNotOk prover task failure of validated failed by coordinator
ProverTaskFailureTypeSubmitStatusNotOk
// ProverTaskFailureTypeVerifiedFailed prover task failure of verified failed by coordinator
ProverTaskFailureTypeVerifiedFailed
// ProverTaskFailureTypeServerError collect occur error
ProverTaskFailureTypeServerError
)
func (r ProverTaskFailureType) String() string {
@@ -111,8 +117,14 @@ func (r ProverTaskFailureType) String() string {
return "prover task failure undefined"
case ProverTaskFailureTypeTimeout:
return "prover task failure timeout"
case ProverTaskFailureTypeSubmitStatusNotOk:
return "prover task failure validated submit proof status not ok"
case ProverTaskFailureTypeVerifiedFailed:
return "prover task failure verified failed"
case ProverTaskFailureTypeServerError:
return "prover task failure server exception"
default:
return "illegal prover task failure type"
return fmt.Sprintf("illegal prover task failure type (%d)", int32(r))
}
}

View File

@@ -3,12 +3,21 @@ package types
const (
// Success shows OK.
Success = 0
// InternalServerError shows a fatal error in the server
InternalServerError = 500
// ErrJWTCommonErr jwt common error
ErrJWTCommonErr = 50000
// ErrJWTTokenExpired jwt token expired
ErrJWTTokenExpired = 50001
// ErrProverStatsAPIParameterInvalidNo is invalid params
ErrProverStatsAPIParameterInvalidNo = 10001
// ErrProverStatsAPIProverTaskFailure is getting prover task error
ErrProverStatsAPIProverTaskFailure = 10002
// ErrProverStatsAPIProverTotalRewardFailure is getting total rewards error
ErrProverStatsAPIProverTotalRewardFailure = 10003
// ErrCoordinatorParameterInvalidNo is invalid params
ErrCoordinatorParameterInvalidNo = 20001
// ErrCoordinatorGetTaskFailure is getting prover task error

53
common/types/response.go Normal file
View File

@@ -0,0 +1,53 @@
package types
import (
"net/http"
"github.com/gin-gonic/gin"
)
// Response is the uniform JSON envelope returned by all API handlers.
type Response struct {
	ErrCode int         `json:"errcode"`
	ErrMsg  string      `json:"errmsg"`
	Data    interface{} `json:"data"`
}

// RenderJSON writes a Response with HTTP 200, embedding the error code, the
// error message (empty when err is nil), and the payload.
func RenderJSON(ctx *gin.Context, errCode int, err error, data interface{}) {
	errMsg := ""
	if err != nil {
		errMsg = err.Error()
	}
	ctx.JSON(http.StatusOK, Response{
		ErrCode: errCode,
		ErrMsg:  errMsg,
		Data:    data,
	})
}

// RenderSuccess writes a success response carrying the given payload.
func RenderSuccess(ctx *gin.Context, data interface{}) {
	RenderJSON(ctx, Success, nil, data)
}

// RenderFailure writes a failure response carrying the given error code.
func RenderFailure(ctx *gin.Context, errCode int, err error) {
	RenderJSON(ctx, errCode, err, nil)
}

// RenderFatal writes an internal-server-error response (HTTP 500) and records
// the error code in the gin context under the "errcode" key.
func RenderFatal(ctx *gin.Context, err error) {
	errMsg := ""
	if err != nil {
		errMsg = err.Error()
	}
	ctx.Set("errcode", InternalServerError)
	ctx.JSON(http.StatusInternalServerError, Response{
		ErrCode: InternalServerError,
		ErrMsg:  errMsg,
		Data:    nil,
	})
}

27
common/utils/http.go Normal file
View File

@@ -0,0 +1,27 @@
package utils
import (
"net/http"
"time"
)
// StartHTTPServer launches an HTTP server on the given address in the
// background. It waits up to one second for ListenAndServe to fail fast
// (e.g. the port is already taken); if no error arrives within that window
// the server is assumed to have started and is returned to the caller.
func StartHTTPServer(address string, handler http.Handler) (*http.Server, error) {
	server := &http.Server{
		Handler:      handler,
		Addr:         address,
		ReadTimeout:  time.Second * 3,
		WriteTimeout: time.Second * 3,
		IdleTimeout:  time.Second * 12,
	}
	startErr := make(chan error, 1)
	go func() {
		startErr <- server.ListenAndServe()
	}()
	select {
	case err := <-startErr:
		return nil, err
	case <-time.After(time.Second):
		return server, nil
	}
}

View File

@@ -2,6 +2,9 @@ package utils
import (
"context"
"crypto/rand"
"fmt"
"math/big"
"time"
"github.com/modern-go/reflect2"
@@ -50,3 +53,9 @@ func Loop(ctx context.Context, period time.Duration, f func()) {
func IsNil(i interface{}) bool {
return i == nil || reflect2.IsNil(i)
}
// RandomURL returns a localhost endpoint with a randomly chosen port in the
// range [12000, 16998], useful for tests that need an unlikely-to-collide port.
func RandomURL() string {
	port, _ := rand.Int(rand.Reader, big.NewInt(5000-1))
	return fmt.Sprintf("localhost:%d", 10000+2000+port.Int64())
}

View File

@@ -0,0 +1,21 @@
package version
import (
"strings"
)
// CheckScrollProverVersion reports whether the remote "scroll-prover" version
// matches the local one. Note that the version string has the format
// "tag-commit-scroll_prover-halo2", so splitting on '-' must yield exactly 4
// parts; only the scroll_prover component (index 2) is compared.
func CheckScrollProverVersion(proverVersion string) bool {
	remoteParts := strings.Split(proverVersion, "-")
	if len(remoteParts) != 4 {
		return false
	}
	localParts := strings.Split(Version, "-")
	if len(localParts) != 4 {
		return false
	}
	return remoteParts[2] == localParts[2]
}

View File

@@ -3,11 +3,9 @@ package version
import (
"fmt"
"runtime/debug"
"strconv"
"strings"
)
var tag = "v4.2.13"
var tag = "v4.3.4"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {
@@ -31,52 +29,3 @@ var ZkVersion = "000000-000000"
// Version denote the version of scroll protocol, including the l2geth, relayer, coordinator, prover, contracts and etc.
var Version = fmt.Sprintf("%s-%s-%s", tag, commit, ZkVersion)
// CheckScrollProverVersion check the "scroll-prover" version, if it's different from the local one, return false
func CheckScrollProverVersion(proverVersion string) bool {
// note that the version is in fact in the format of "tag-commit-scroll_prover-halo2",
// so split-by-'-' length should be 4
remote := strings.Split(proverVersion, "-")
if len(remote) != 4 {
return false
}
local := strings.Split(Version, "-")
if len(local) != 4 {
return false
}
// compare the `scroll_prover` version
return remote[2] == local[2]
}
// CheckScrollProverVersionTag reports whether the remote "scroll-prover"
// version tag is recent enough. Note that the version string has the format
// "tag-commit-scroll_prover-halo2", so splitting on '-' must yield exactly 4
// parts, and the tag itself must be "vMAJOR.MINOR.PATCH". Versions below
// major 4 are rejected, as is the v4.1.x line before v4.1.98.
func CheckScrollProverVersionTag(proverVersion string) bool {
	parts := strings.Split(proverVersion, "-")
	if len(parts) != 4 {
		return false
	}
	nums := strings.Split(strings.TrimPrefix(parts[0], "v"), ".")
	if len(nums) != 3 {
		return false
	}
	// Parse major/minor/patch; any non-numeric component rejects the tag.
	ver := make([]int, 3)
	for i, s := range nums {
		n, err := strconv.Atoi(s)
		if err != nil {
			return false
		}
		ver[i] = n
	}
	if ver[0] < 4 {
		return false
	}
	if ver[1] == 1 && ver[2] < 98 {
		return false
	}
	return true
}

View File

@@ -1,78 +1,78 @@
# Scroll Contracts
Note: For more comprehensive documentation, see [`./docs/`](./docs).
This directory contains the solidity code for Scroll L1 bridge and rollup contracts and L2 bridge and pre-deployed contracts. The [`specs`](../specs/) folder describes the overall Scroll protocol including the cross-domain messaging and rollup process. You can also find contract APIs and more details in the [`docs`](./docs) folder.
## Directory Structure
```
integration-test
|- xxx.test.ts - "Hardhat integration tests"
lib
|- forge-std - "foundry dependency"
scripts
|- deploy_xxx.ts - "hardhat deploy script"
|- foundry - "foundry deploy scripts"
src
|- test
| `- xxx.t.sol - "Unit tests in solidity"
`- xxx.sol - "solidity contract"
.gitmodules - "foundry dependency modules"
foundry.toml - "configure foundry"
hardhat.config.ts - "configure hardhat"
remappings.txt - "foundry dependency mappings"
<pre>
├── <a href="./docs/">docs</a>: Documentation for the contracts
├── <a href="./integration-test/">integration-test</a>: Hardhat integration tests
├── <a href="./lib/">lib</a>: External libraries and testing tools
├── <a href="./scripts">scripts</a>: Deployment scripts
├── <a href="./src">src</a>
│ ├── <a href="./src/gas-swap/">gas-swap</a>: Utility contract that allows gas payment in other tokens
│ ├── <a href="./src/interfaces/">interfaces</a>: Common contract interfaces
│ ├── <a href="./src/L1/">L1</a>: Contracts deployed on the L1 (Ethereum)
│ │ ├── <a href="./src/L1/gateways/">gateways</a>: Gateway router and token gateway contracts
│ │ ├── <a href="./src/L1/rollup/">rollup</a>: Rollup contracts for data availability and finalization
│ │ ├── <a href="./src/L1/IL1ScrollMessenger.sol">IL1ScrollMessenger.sol</a>: L1 Scroll messenger interface
│ │ └── <a href="./src/L1/L1ScrollMessenger.sol">L1ScrollMessenger.sol</a>: L1 Scroll messenger contract
│ ├── <a href="./src/L2/">L2</a>: Contracts deployed on the L2 (Scroll)
│ │ ├── <a href="./src/L2/gateways/">gateways</a>: Gateway router and token gateway contracts
│ │ ├── <a href="./src/L2/predeploys/">predeploys</a>: Pre-deployed contracts on L2
│ │ ├── <a href="./src/L2/IL2ScrollMessenger.sol">IL2ScrollMessenger.sol</a>: L2 Scroll messenger interface
│ │ └── <a href="./src/L2/L2ScrollMessenger.sol">L2ScrollMessenger.sol</a>: L2 Scroll messenger contract
│ ├── <a href="./src/libraries/">libraries</a>: Shared contract libraries
│ ├── <a href="./src/misc/">misc</a>: Miscellaneous contracts
│ ├── <a href="./src/mocks/">mocks</a>: Mock contracts used in the testing
│ ├── <a href="./src/rate-limiter/">rate-limiter</a>: Rater limiter contract
│ └── <a href="./src/test/">test</a>: Unit tests in solidity
├── <a href="./foundry.toml">foundry.toml</a>: Foundry configuration
├── <a href="./hardhat.config.ts">hardhat.config.ts</a>: Hardhat configuration
├── <a href="./remappings.txt">remappings.txt</a>: Foundry dependency mappings
...
```
</pre>
## Dependencies
### Node.js
First install [`Node.js`](https://nodejs.org/en) and [`npm`](https://www.npmjs.com/).
Run the following command to install [`yarn`](https://classic.yarnpkg.com/en/):
```bash
npm install --global yarn
```
### Foundry
First run the command below to get foundryup, the Foundry toolchain installer:
Install `foundryup`, the Foundry toolchain installer:
```bash
curl -L https://foundry.paradigm.xyz | bash
```
If you do not want to use the redirect, feel free to manually download the foundryup installation script from [here](https://raw.githubusercontent.com/foundry-rs/foundry/master/foundryup/foundryup).
If you do not want to use the redirect, feel free to manually download the `foundryup` installation script from [here](https://raw.githubusercontent.com/foundry-rs/foundry/master/foundryup/foundryup).
Then, run `foundryup` in a new terminal session or after reloading your `PATH`.
Then, run `foundryup` in a new terminal session or after reloading `PATH`.
Other ways to install Foundry can be found [here](https://github.com/foundry-rs/foundry#installation).
### Hardhat
Run the following command to install [Hardhat](https://hardhat.org/) and other dependencies.
```
yarn install
```
## Build
- Run `git submodule update --init --recursive` to initialise git submodules.
- Run `git submodule update --init --recursive` to initialize git submodules.
- Run `yarn prettier:solidity` to run linting in fix mode, will auto-format all solidity codes.
- Run `yarn prettier` to run linting in fix mode, will auto-format all typescript codes.
- Run `yarn prepare` to install the precommit linting hook
- Run `yarn prepare` to install the precommit linting hook.
- Run `forge build` to compile contracts with foundry.
- Run `npx hardhat compile` to compile with hardhat.
- Run `forge test -vvv` to run foundry units tests. It will compile all contracts before running the unit tests.
- Run `npx hardhat test` to run integration tests. It may not compile all contracts before running, it's better to run `npx hardhat compile` first.
## TODO
- [ ] unit tests
- [ ] L1 Messenger
- [x] L1 Gateways
- [x] L1 Gateway Router
- [ ] L2 Messenger
- [x] L2 Gateways
- [x] L2 Gateway Router
- [x] ScrollStandardERC20Factory
- [x] Whitelist
- [ ] SimpleGasOracle
- [ ] integration tests
- [x] ERC20Gateway
- [x] GatewayRouter
- [ ] ZKRollup contracts
- [x] Gas Oracle contracts for cross chain message call
- [ ] ERC721/ERC115 interface design
- [ ] add proof verification codes
- [ ] security analysis

View File

@@ -1,33 +0,0 @@
# Cross Domain Messaging
Like other layer 2 protocols, Scroll allows dapps to communicate between layer 1 and layer 2. More specifically, dapps on layer 1 can trigger contract functions on layer 2, and vice versa.
## Message Between L1 and L2
The Scroll protocol implements two core contracts `L1ScrollMessenger` and `L2ScrollMessenger` to enable cross domain messaging. The only entry to send cross domain message is to call the following function:
```solidity
function sendMessage(
address _to,
bytes memory _message,
uint256 _gasLimit
) external payable
```
The function is available on the messengers in both layer 1 and layer 2. After that, the Sequencer will handle the rest for you. We will explain the detailed workflow in the following docs.
### Send Message from L1 to L2
As described above, the first step is to call `L1ScrollMessenger.sendMessage` in layer 1. The `L1ScrollMessenger` contract will emit a `SentMessage` event, which the Sequencer will be notified of. The Sequencer will wait for the confirmation of the function call in layer 1. Normally, the Sequencer will wait for 10-20 blocks. After that, the Sequencer will initiate a transaction in layer 2, calling the function `L2ScrollMessenger.relayMessage`, and finally, the message is executed in layer 2.
The execution in layer 2 may fail due to an out-of-gas problem. In such a case, one can call `L1ScrollMessenger.replayMessage` to replace the message with a larger gas limit, and the Sequencer will follow the steps and execute the message again in layer 2.
### Send Message from L2 to L1
Similar to sending a message from L1 to L2, you should first call `L2ScrollMessenger.sendMessage` in layer 2. The `L2ScrollMessenger` contract will emit a `SentMessage` event, which the Sequencer will be notified of. Unlike above, the Sequencer will first batch-submit layer 2 transactions (or blocks) to the `ZKRollup` contract in layer 1. Then the Sequencer will wait for the proof generated by the prover and submit the proof to the `ZKRollup` contract in layer 1 again. Finally, anyone can call `L1ScrollMessenger.relayMessageWithProof` with the correct proof to execute the message in layer 1.
Currently, for safety reasons, we only allow privileged contracts to send cross domain messages, and only privileged accounts can call `L2ScrollMessenger.relayMessage`.
## Fee For Sending Message
to be discussed.

View File

@@ -1,22 +0,0 @@
# Overview
![](./assets/overview.png)
The above picture is the overview of the contract design. There are several components both in layer 1 and layer 2: L1/L2 Scroll Messenger, various L1/L2 Gateways and L1/L2 Gateway Router. Besides these, there is a Rollup component only in layer 1.
The following are the detailed docs for each component (docs are generated automatically by the `@primitivefi/hardhat-dodoc` plugin):
- [L1 Scroll Messenger](./apis/L1ScrollMessenger.md) and [L2 Scroll Messenger](./apis/L2ScrollMessenger.md): Main entry for sending and relaying cross domain message.
- [Rollup](./apis/ZKRollup.md)
- [L1 Gateway Router](./apis/L1GatewayRouter.md) and [L2 Gateway Router](./apis/L2GatewayRouter.md): Router contract for depositing/withdrawing Ethers and ERC20 tokens.
- L1/L2 Gateways:
- [L1 Standard ERC20 Gateway](./apis/L1StandardERC20Gateway.md) and [L2 Standard ERC20 Gateway](./apis/L2StandardERC20Gateway.md)
- [L1 WETH Gateway](./apis/L1WETHGateway.md) and [L2 WETH Gateway](./apis/L2WETHGateway.md)
- [L1 ERC721 Gateway](./apis/L1ERC721Gateway.md) and [L2 ERC721 Gateway](./apis/L2ERC721Gateway.md)
- [L1 ERC1155 Gateway](./apis/L1ERC1155Gateway.md) and [L2 ERC1155 Gateway](./apis/L2ERC1155Gateway.md)
- [ScrollStandardERC20Factory](./apis/ScrollStandardERC20Factory.md): The `ScrollStandardERC20` token factory used by `L2StandardERC20Gateway`.
There are two main applications: Token Bridge and Cross Domain Messaging. You can find the documentation in the links below:
- [Token Bridge](./TokenBridge.md): moving token from layer 1 to layer 2, or from layer 2 to layer 1.
- [Cross Domain Messaging](./CrossDomainMessaging.md): sending data to layer 2 from layer 1, or sending data to layer 1 from layer 2. Basically, it helps to trigger function calls across layers. The token bridge also uses cross domain messaging to achieve its functionality.

View File

@@ -1,134 +0,0 @@
# Bridge Token Between Layer 1 and Layer 2
The Token Bridge of the Scroll Protocol offers a way to move assets from layer 1 to layer 2 and back, including Ether, ERC20 tokens, ERC-721 tokens, ERC-1155 tokens, etc. The asset is deposited and locked on layer 1 and, in exchange, the same amount of an equivalent token is credited on layer 2. For example, if you deposit 1000 Ether on layer 1, you will get 1000 Ether on layer 2 in return. And if you withdraw 1000 Ether on layer 2, you will get 1000 Ether on layer 1 in return.
The Ether and ERC20 tokens can be deposited or withdrawn using one single contract `GatewayRouter` (`L1GatewayRouter` in layer 1 and `L2GatewayRouter` in layer 2). The ERC-721 tokens and ERC-1155 tokens can be deposited or withdrawn using the corresponding `ERC1155Gateway` and `ERC721Gateway` in layer 1 or layer 2 (They may be integrated into `GatewayRouter` in the future).
## Bridge Ether
To bridge Ether from layer 1 to layer 2, one can use `L1GatewayRouter.depositETH`. This will transfer ethers to the `L1ScrollMessenger` contract on the layer 1 and credits the same amount of ether to you in layer 2 at the specified address.
```solidity
function depositETH(uint256 _gasLimit) external payable;
function depositETH(address _to, uint256 _gasLimit) external payable;
```
On layer 1, all deposited Ether will be locked in the `L1ScrollMessenger` contract. This means your deposited Ether will first be transferred to the `L1GatewayRouter` contract and then to the `L1ScrollMessenger` contract.
To withdraw Ether from layer 2 to layer 1, one can use `L2GatewayRouter.withdrawETH`.
```solidity
function withdrawETH(uint256 _gasLimit) external payable;
function withdrawETH(address _to, uint256 _gasLimit) external payable;
```
On layer 2, the `L2ScrollMessenger` holds an infinite amount of Ether at the beginning. All your withdrawn Ether will be transferred back to `L2ScrollMessenger`, just like the process on layer 1.
In addition, you can actually call `sendMessage` from the `L1ScrollMessenger` or `L2ScrollMessenger` contract to deposit or withdraw Ether. `L1GatewayRouter.depositETH` and `L2GatewayRouter.withdrawETH` are just aliases for `L1ScrollMessenger.sendMessage`/`L2ScrollMessenger.sendMessage`.
## Bridge ERC20 Tokens
We use a similar design to the [Arbitrum protocol](https://developer.offchainlabs.com/docs/bridging_assets#bridging-erc20-tokens). Several gateway contracts are used to bridge different kinds of ERC20 tokens, such as Wrapped Ether, standard ERC20 tokens, etc.
We implement a `StandardERC20Gateway` to deposit and withdraw standard ERC20 tokens. The standard procedure to deposit ERC20 tokens is to call `L1GatewayRouter.depositERC20` in layer 1; the token will be locked in the `L1StandardERC20Gateway` contract in layer 1. The standard procedure to withdraw ERC20 tokens is to call `L2GatewayRouter.withdrawERC20` in layer 2, and the token will be burned in layer 2.
For many other non-standard ERC20 tokens, we provide a custom ERC20 gateway. Anyone can implement such gateway as long as it implements all required interfaces. We implement the Wrapped Ether gateway as an example. To deposit or withdraw Wrapped Ether, one should first unwrap it to Ether, then transfer the Ether to `ScrollMessenger` just like Ether bridging.
### Passing data when depositing ERC20 tokens
The Scroll protocol offers a way to call another contract after depositing the token in layer 2 by calling `L1GatewayRouter.depositERC20AndCall` in layer 1. The ERC20 token in layer 2 implements the [ERC 677 Standard](https://github.com/ethereum/EIPs/issues/677). By using the `transferAndCall` function, we can transfer the token to the corresponding recipient in layer 2 and then call the recipient with the passed data.
```solidity
function depositERC20AndCall(
address _token,
address _to,
uint256 _amount,
bytes memory _data,
uint256 _gasLimit
) external;
```
Like Bridging Ether, all above functionality can be achieved by calling corresponding function in ERC20Gateway contract.
## Bridge ERC-721/ERC-1155 Tokens
Depositing/withdrawing ERC-721 or ERC-1155 tokens works very similarly to ERC20 tokens. One can use the following functions to deposit ERC-721/ERC-1155 tokens in layer 1.
```solidity
function depositERC1155(
address _token,
uint256 _tokenId,
uint256 _amount,
uint256 _gasLimit
) external;
function depositERC1155(
address _token,
address _to,
uint256 _tokenId,
uint256 _amount,
uint256 _gasLimit
) external;
function depositERC721(
address _token,
uint256 _tokenId,
uint256 _gasLimit
) external;
function depositERC721(
address _token,
address _to,
uint256 _tokenId,
uint256 _gasLimit
) external;
```
One can use the following functions to withdraw ERC-721/ERC-1155 tokens in layer 2.
```solidity
function withdrawERC1155(
address _token,
uint256 _tokenId,
uint256 _amount,
uint256 _gasLimit
) external;
function withdrawERC1155(
address _token,
address _to,
uint256 _tokenId,
uint256 _amount,
uint256 _gasLimit
) external;
function withdrawERC721(
address _token,
uint256 _tokenId,
uint256 _gasLimit
) external;
function withdrawERC721(
address _token,
address _to,
uint256 _tokenId,
uint256 _gasLimit
) external;
```
To save gas, we also provide batch deposit/withdraw functions, such as `batchDepositERC1155` and `batchDepositERC721`, which take a list of token ids.
## Drop Depositing/Withdrawing
Coming soon...
## Force Exit
Coming soon...

View File

@@ -0,0 +1,685 @@
# ScrollChain
> ScrollChain
This contract maintains data for the Scroll rollup.
## Methods
### addProver
```solidity
function addProver(address _account) external nonpayable
```
Add an account to the prover list.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _account | address | The address of account to add. |
### addSequencer
```solidity
function addSequencer(address _account) external nonpayable
```
Add an account to the sequencer list.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _account | address | The address of account to add. |
### commitBatch
```solidity
function commitBatch(uint8 _version, bytes _parentBatchHeader, bytes[] _chunks, bytes _skippedL1MessageBitmap) external nonpayable
```
Commit a batch of transactions on layer 1.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _version | uint8 | undefined |
| _parentBatchHeader | bytes | undefined |
| _chunks | bytes[] | undefined |
| _skippedL1MessageBitmap | bytes | undefined |
### committedBatches
```solidity
function committedBatches(uint256) external view returns (bytes32)
```
Return the batch hash of a committed batch.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _0 | uint256 | undefined |
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | bytes32 | undefined |
### finalizeBatchWithProof
```solidity
function finalizeBatchWithProof(bytes _batchHeader, bytes32 _prevStateRoot, bytes32 _postStateRoot, bytes32 _withdrawRoot, bytes _aggrProof) external nonpayable
```
Finalize a committed batch on layer 1.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _batchHeader | bytes | undefined |
| _prevStateRoot | bytes32 | undefined |
| _postStateRoot | bytes32 | undefined |
| _withdrawRoot | bytes32 | undefined |
| _aggrProof | bytes | undefined |
### finalizedStateRoots
```solidity
function finalizedStateRoots(uint256) external view returns (bytes32)
```
Return the state root of a committed batch.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _0 | uint256 | undefined |
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | bytes32 | undefined |
### importGenesisBatch
```solidity
function importGenesisBatch(bytes _batchHeader, bytes32 _stateRoot) external nonpayable
```
Import layer 2 genesis block
#### Parameters
| Name | Type | Description |
|---|---|---|
| _batchHeader | bytes | undefined |
| _stateRoot | bytes32 | undefined |
### initialize
```solidity
function initialize(address _messageQueue, address _verifier, uint256 _maxNumTxInChunk) external nonpayable
```
#### Parameters
| Name | Type | Description |
|---|---|---|
| _messageQueue | address | undefined |
| _verifier | address | undefined |
| _maxNumTxInChunk | uint256 | undefined |
### isBatchFinalized
```solidity
function isBatchFinalized(uint256 _batchIndex) external view returns (bool)
```
Return whether the batch is finalized by batch index.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _batchIndex | uint256 | undefined |
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | bool | undefined |
### isProver
```solidity
function isProver(address) external view returns (bool)
```
Whether an account is a prover.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _0 | address | undefined |
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | bool | undefined |
### isSequencer
```solidity
function isSequencer(address) external view returns (bool)
```
Whether an account is a sequencer.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _0 | address | undefined |
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | bool | undefined |
### lastFinalizedBatchIndex
```solidity
function lastFinalizedBatchIndex() external view returns (uint256)
```
The latest finalized batch index.
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | uint256 | undefined |
### layer2ChainId
```solidity
function layer2ChainId() external view returns (uint64)
```
The chain id of the corresponding layer 2 chain.
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | uint64 | undefined |
### maxNumTxInChunk
```solidity
function maxNumTxInChunk() external view returns (uint256)
```
The maximum number of transactions allowed in each chunk.
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | uint256 | undefined |
### messageQueue
```solidity
function messageQueue() external view returns (address)
```
The address of L1MessageQueue.
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | address | undefined |
### owner
```solidity
function owner() external view returns (address)
```
*Returns the address of the current owner.*
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | address | undefined |
### paused
```solidity
function paused() external view returns (bool)
```
*Returns true if the contract is paused, and false otherwise.*
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | bool | undefined |
### removeProver
```solidity
function removeProver(address _account) external nonpayable
```
Remove an account from the prover list.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _account | address | The address of account to remove. |
### removeSequencer
```solidity
function removeSequencer(address _account) external nonpayable
```
Remove an account from the sequencer list.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _account | address | The address of account to remove. |
### renounceOwnership
```solidity
function renounceOwnership() external nonpayable
```
*Leaves the contract without owner. It will not be possible to call `onlyOwner` functions. Can only be called by the current owner. NOTE: Renouncing ownership will leave the contract without an owner, thereby disabling any functionality that is only available to the owner.*
### revertBatch
```solidity
function revertBatch(bytes _batchHeader, uint256 _count) external nonpayable
```
Revert a pending batch.
*If the owner want to revert a sequence of batches by sending multiple transactions, make sure to revert recent batches first.*
#### Parameters
| Name | Type | Description |
|---|---|---|
| _batchHeader | bytes | undefined |
| _count | uint256 | undefined |
### setPause
```solidity
function setPause(bool _status) external nonpayable
```
Pause the contract
#### Parameters
| Name | Type | Description |
|---|---|---|
| _status | bool | The pause status to update. |
### transferOwnership
```solidity
function transferOwnership(address newOwner) external nonpayable
```
*Transfers ownership of the contract to a new account (`newOwner`). Can only be called by the current owner.*
#### Parameters
| Name | Type | Description |
|---|---|---|
| newOwner | address | undefined |
### updateMaxNumTxInChunk
```solidity
function updateMaxNumTxInChunk(uint256 _maxNumTxInChunk) external nonpayable
```
Update the value of `maxNumTxInChunk`.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _maxNumTxInChunk | uint256 | The new value of `maxNumTxInChunk`. |
### updateVerifier
```solidity
function updateVerifier(address _newVerifier) external nonpayable
```
Update the address of the verifier contract.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _newVerifier | address | The address of new verifier contract. |
### verifier
```solidity
function verifier() external view returns (address)
```
The address of RollupVerifier.
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | address | undefined |
### withdrawRoots
```solidity
function withdrawRoots(uint256) external view returns (bytes32)
```
Return the withdraw root of a committed batch.
#### Parameters
| Name | Type | Description |
|---|---|---|
| _0 | uint256 | undefined |
#### Returns
| Name | Type | Description |
|---|---|---|
| _0 | bytes32 | undefined |
## Events
### CommitBatch
```solidity
event CommitBatch(uint256 indexed batchIndex, bytes32 indexed batchHash)
```
Emitted when a new batch is committed.
#### Parameters
| Name | Type | Description |
|---|---|---|
| batchIndex `indexed` | uint256 | undefined |
| batchHash `indexed` | bytes32 | undefined |
### FinalizeBatch
```solidity
event FinalizeBatch(uint256 indexed batchIndex, bytes32 indexed batchHash, bytes32 stateRoot, bytes32 withdrawRoot)
```
Emitted when a batch is finalized.
#### Parameters
| Name | Type | Description |
|---|---|---|
| batchIndex `indexed` | uint256 | undefined |
| batchHash `indexed` | bytes32 | undefined |
| stateRoot | bytes32 | undefined |
| withdrawRoot | bytes32 | undefined |
### Initialized
```solidity
event Initialized(uint8 version)
```
#### Parameters
| Name | Type | Description |
|---|---|---|
| version | uint8 | undefined |
### OwnershipTransferred
```solidity
event OwnershipTransferred(address indexed previousOwner, address indexed newOwner)
```
#### Parameters
| Name | Type | Description |
|---|---|---|
| previousOwner `indexed` | address | undefined |
| newOwner `indexed` | address | undefined |
### Paused
```solidity
event Paused(address account)
```
#### Parameters
| Name | Type | Description |
|---|---|---|
| account | address | undefined |
### RevertBatch
```solidity
event RevertBatch(uint256 indexed batchIndex, bytes32 indexed batchHash)
```
Emitted when a pending batch is reverted.
#### Parameters
| Name | Type | Description |
|---|---|---|
| batchIndex `indexed` | uint256 | undefined |
| batchHash `indexed` | bytes32 | undefined |
### Unpaused
```solidity
event Unpaused(address account)
```
#### Parameters
| Name | Type | Description |
|---|---|---|
| account | address | undefined |
### UpdateMaxNumTxInChunk
```solidity
event UpdateMaxNumTxInChunk(uint256 oldMaxNumTxInChunk, uint256 newMaxNumTxInChunk)
```
Emitted when the value of `maxNumTxInChunk` is updated.
#### Parameters
| Name | Type | Description |
|---|---|---|
| oldMaxNumTxInChunk | uint256 | The old value of `maxNumTxInChunk`. |
| newMaxNumTxInChunk | uint256 | The new value of `maxNumTxInChunk`. |
### UpdateProver
```solidity
event UpdateProver(address indexed account, bool status)
```
Emitted when owner updates the status of prover.
#### Parameters
| Name | Type | Description |
|---|---|---|
| account `indexed` | address | The address of account updated. |
| status | bool | The status of the account updated. |
### UpdateSequencer
```solidity
event UpdateSequencer(address indexed account, bool status)
```
Emitted when owner updates the status of sequencer.
#### Parameters
| Name | Type | Description |
|---|---|---|
| account `indexed` | address | The address of account updated. |
| status | bool | The status of the account updated. |
### UpdateVerifier
```solidity
event UpdateVerifier(address indexed oldVerifier, address indexed newVerifier)
```
Emitted when the address of rollup verifier is updated.
#### Parameters
| Name | Type | Description |
|---|---|---|
| oldVerifier `indexed` | address | The address of old rollup verifier. |
| newVerifier `indexed` | address | The address of new rollup verifier. |

Binary file not shown.

Before

Width:  |  Height:  |  Size: 684 KiB

View File

@@ -84,11 +84,14 @@ const config: HardhatUserConfig = {
etherscan: {
apiKey: process.env.ETHERSCAN_API_KEY,
},
mocha: {
timeout: 10000000,
},
dodoc: {
runOnCompile: true,
keepFileStructure: false,
include: [
"ZKRollup",
"ScrollChain",
"L1ScrollMessenger",
"L2ScrollMessenger",
"L1GatewayRouter",
@@ -115,8 +118,9 @@ const config: HardhatUserConfig = {
"IL1ERC1155Gateway",
"IL2ERC1155Gateway",
"IScrollStandardERC20Factory",
"IZKRollup",
"WrappedEther",
"IScrollChain",
"ScrollChainCommitmentVerifier",
"WETH9",
],
},
};

View File

@@ -54,7 +54,7 @@ describe("EnforcedTxGateway.spec", async () => {
await queue.initialize(constants.AddressZero, constants.AddressZero, gateway.address, oracle.address, 10000000);
await gateway.initialize(queue.address, feeVault.address);
await oracle.initialize(21000, 0, 8, 16);
await oracle.initialize(21000, 51000, 8, 16);
const Whitelist = await ethers.getContractFactory("Whitelist", deployer);
const whitelist = await Whitelist.deploy(deployer.address);

View File

@@ -1,7 +1,7 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { ethers } from "hardhat";
import { GasSwap, MinimalForwarder, MockERC20, MockGasSwapTarget } from "../typechain";
import { GasSwap, ERC2771Forwarder, MockERC20, MockGasSwapTarget } from "../typechain";
import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers";
import { expect } from "chai";
import { BigNumber, constants } from "ethers";
@@ -11,7 +11,7 @@ describe("GasSwap.spec", async () => {
let deployer: SignerWithAddress;
let signer: SignerWithAddress;
let forwarder: MinimalForwarder;
let forwarder: ERC2771Forwarder;
let swap: GasSwap;
let target: MockGasSwapTarget;
let token: MockERC20;
@@ -19,8 +19,8 @@ describe("GasSwap.spec", async () => {
beforeEach(async () => {
[deployer, signer] = await ethers.getSigners();
const MinimalForwarder = await ethers.getContractFactory("MinimalForwarder", deployer);
forwarder = await MinimalForwarder.deploy();
const ERC2771Forwarder = await ethers.getContractFactory("ERC2771Forwarder", deployer);
forwarder = await ERC2771Forwarder.deploy("ERC2771Forwarder");
await forwarder.deployed();
const GasSwap = await ethers.getContractFactory("GasSwap", deployer);
@@ -253,12 +253,13 @@ describe("GasSwap.spec", async () => {
await swap.updateFeeRatio(ethers.utils.parseEther(feeRatio).div(100));
const fee = amountOut.mul(feeRatio).div(100);
const req = {
const reqWithoutSignature = {
from: signer.address,
to: swap.address,
value: constants.Zero,
gas: 1000000,
nonce: 0,
nonce: await forwarder.nonces(signer.address),
deadline: 2000000000,
data: swap.interface.encodeFunctionData("swap", [
{
token: token.address,
@@ -278,8 +279,8 @@ describe("GasSwap.spec", async () => {
const signature = await signer._signTypedData(
{
name: "MinimalForwarder",
version: "0.0.1",
name: "ERC2771Forwarder",
version: "1",
chainId: (await ethers.provider.getNetwork()).chainId,
verifyingContract: forwarder.address,
},
@@ -305,17 +306,29 @@ describe("GasSwap.spec", async () => {
name: "nonce",
type: "uint256",
},
{
name: "deadline",
type: "uint48",
},
{
name: "data",
type: "bytes",
},
],
},
req
reqWithoutSignature
);
const balanceBefore = await signer.getBalance();
await forwarder.execute(req, signature);
await forwarder.execute({
from: reqWithoutSignature.from,
to: reqWithoutSignature.to,
value: reqWithoutSignature.value,
gas: reqWithoutSignature.gas,
deadline: reqWithoutSignature.deadline,
data: reqWithoutSignature.data,
signature,
});
const balanceAfter = await signer.getBalance();
expect(balanceAfter.sub(balanceBefore)).to.eq(amountOut.sub(fee));
expect(await token.balanceOf(signer.address)).to.eq(amountIn.mul(refundRatio).div(100));

View File

@@ -42,7 +42,7 @@ describe("L1MessageQueue", async () => {
deployer
);
await oracle.initialize(21000, 0, 8, 16);
await oracle.initialize(21000, 50000, 8, 16);
await queue.initialize(messenger.address, scrollChain.address, gateway.address, oracle.address, 10000000);
});
@@ -264,8 +264,8 @@ describe("L1MessageQueue", async () => {
});
it("should succeed", async () => {
// append 100 messages
for (let i = 0; i < 100; i++) {
// append 512 messages
for (let i = 0; i < 256 * 2; i++) {
await queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 1000000, "0x");
}
@@ -274,7 +274,8 @@ describe("L1MessageQueue", async () => {
.to.emit(queue, "DequeueTransaction")
.withArgs(0, 50, 0);
for (let i = 0; i < 50; i++) {
expect(await queue.getCrossDomainMessage(i)).to.eq(constants.HashZero);
expect(await queue.isMessageSkipped(i)).to.eq(false);
expect(await queue.isMessageDropped(i)).to.eq(false);
}
expect(await queue.pendingQueueIndex()).to.eq(50);
@@ -284,7 +285,8 @@ describe("L1MessageQueue", async () => {
.withArgs(50, 10, 1023);
expect(await queue.pendingQueueIndex()).to.eq(60);
for (let i = 50; i < 60; i++) {
expect(BigNumber.from(await queue.getCrossDomainMessage(i))).to.gt(constants.Zero);
expect(await queue.isMessageSkipped(i)).to.eq(true);
expect(await queue.isMessageDropped(i)).to.eq(false);
}
// pop 20 messages, skip first 5
@@ -293,10 +295,27 @@ describe("L1MessageQueue", async () => {
.withArgs(60, 20, 31);
expect(await queue.pendingQueueIndex()).to.eq(80);
for (let i = 60; i < 65; i++) {
expect(BigNumber.from(await queue.getCrossDomainMessage(i))).to.gt(constants.Zero);
expect(await queue.isMessageSkipped(i)).to.eq(true);
expect(await queue.isMessageDropped(i)).to.eq(false);
}
for (let i = 65; i < 80; i++) {
expect(await queue.getCrossDomainMessage(i)).to.eq(constants.HashZero);
expect(await queue.isMessageSkipped(i)).to.eq(false);
expect(await queue.isMessageDropped(i)).to.eq(false);
}
// pop 256 messages with random skip
const bitmap = BigNumber.from("0x496525059c3f33758d17030403e45afe067b8a0ae1317cda0487fd2932cbea1a");
const tx = await queue.connect(scrollChain).popCrossDomainMessage(80, 256, bitmap);
await expect(tx).to.emit(queue, "DequeueTransaction").withArgs(80, 256, bitmap);
console.log("gas used:", (await tx.wait()).gasUsed.toString());
for (let i = 80; i < 80 + 256; i++) {
expect(await queue.isMessageSkipped(i)).to.eq(
bitmap
.shr(i - 80)
.and(1)
.eq(1)
);
expect(await queue.isMessageDropped(i)).to.eq(false);
}
});
});
@@ -308,7 +327,7 @@ describe("L1MessageQueue", async () => {
);
});
it("should revert, when drop executed message", async () => {
it("should revert, when drop non-skipped message", async () => {
// append 10 messages
for (let i = 0; i < 10; i++) {
await queue.connect(messenger).appendCrossDomainMessage(constants.AddressZero, 1000000, "0x");
@@ -318,14 +337,13 @@ describe("L1MessageQueue", async () => {
.to.emit(queue, "DequeueTransaction")
.withArgs(0, 5, 0);
for (let i = 0; i < 5; i++) {
expect(await queue.getCrossDomainMessage(i)).to.eq(constants.HashZero);
expect(await queue.isMessageSkipped(i)).to.eq(false);
expect(await queue.isMessageDropped(i)).to.eq(false);
}
expect(await queue.pendingQueueIndex()).to.eq(5);
for (let i = 0; i < 5; i++) {
await expect(queue.connect(messenger).dropCrossDomainMessage(i)).to.revertedWith(
"message already dropped or executed"
);
await expect(queue.connect(messenger).dropCrossDomainMessage(i)).to.revertedWith("drop non-skipped message");
}
// drop pending message
@@ -345,9 +363,12 @@ describe("L1MessageQueue", async () => {
.withArgs(0, 10, 0x3ff);
for (let i = 0; i < 10; i++) {
expect(BigNumber.from(await queue.getCrossDomainMessage(i))).to.gt(constants.Zero);
expect(await queue.isMessageSkipped(i)).to.eq(true);
expect(await queue.isMessageDropped(i)).to.eq(false);
await expect(queue.connect(messenger).dropCrossDomainMessage(i)).to.emit(queue, "DropTransaction").withArgs(i);
expect(await queue.getCrossDomainMessage(i)).to.eq(constants.HashZero);
await expect(queue.connect(messenger).dropCrossDomainMessage(i)).to.revertedWith("message already dropped");
expect(await queue.isMessageSkipped(i)).to.eq(true);
expect(await queue.isMessageDropped(i)).to.eq(true);
}
});
});

View File

@@ -1,5 +1,6 @@
/* eslint-disable node/no-unpublished-import */
/* eslint-disable node/no-missing-import */
import { concat } from "ethers/lib/utils";
import { constants } from "ethers";
import { ethers } from "hardhat";
import { ScrollChain, L1MessageQueue } from "../typechain";
@@ -11,22 +12,27 @@ describe("ScrollChain", async () => {
beforeEach(async () => {
const [deployer] = await ethers.getSigners();
const ProxyAdmin = await ethers.getContractFactory("ProxyAdmin", deployer);
const admin = await ProxyAdmin.deploy();
await admin.deployed();
const TransparentUpgradeableProxy = await ethers.getContractFactory("TransparentUpgradeableProxy", deployer);
const L1MessageQueue = await ethers.getContractFactory("L1MessageQueue", deployer);
queue = await L1MessageQueue.deploy();
await queue.deployed();
const queueImpl = await L1MessageQueue.deploy();
await queueImpl.deployed();
const queueProxy = await TransparentUpgradeableProxy.deploy(queueImpl.address, admin.address, "0x");
await queueProxy.deployed();
queue = await ethers.getContractAt("L1MessageQueue", queueProxy.address, deployer);
const RollupVerifier = await ethers.getContractFactory("RollupVerifier", deployer);
const verifier = await RollupVerifier.deploy();
await verifier.deployed();
const ScrollChain = await ethers.getContractFactory("ScrollChain", deployer);
const chainImpl = await ScrollChain.deploy(0);
await chainImpl.deployed();
const chainProxy = await TransparentUpgradeableProxy.deploy(chainImpl.address, admin.address, "0x");
await chainProxy.deployed();
chain = await ethers.getContractAt("ScrollChain", chainProxy.address, deployer);
const ScrollChain = await ethers.getContractFactory("ScrollChain", {
signer: deployer,
libraries: { RollupVerifier: verifier.address },
});
chain = await ScrollChain.deploy(0);
await chain.deployed();
await chain.initialize(queue.address, constants.AddressZero, 44);
await chain.initialize(queue.address, constants.AddressZero, 100);
await chain.addSequencer(deployer.address);
await queue.initialize(
constants.AddressZero,
@@ -38,79 +44,54 @@ describe("ScrollChain", async () => {
});
// @note skip this benchmark tests
/*
it("should succeed", async () => {
await chain.importGenesisBatch({
blocks: [
{
blockHash: "0x92826bd3aad2ef70d8061dc4e25150b305d1233d9cd7579433a77d6eb01dae1c",
parentHash: constants.HashZero,
blockNumber: 0,
timestamp: 1639724192,
baseFee: 1000000000,
gasLimit: 940000000,
numTransactions: 0,
numL1Messages: 0,
},
],
prevStateRoot: constants.HashZero,
newStateRoot: "0x1b186a7a90ec3b41a2417062fe44dce8ce82ae76bfbb09eae786a4f1be1895f5",
withdrawTrieRoot: constants.HashZero,
batchIndex: 0,
parentBatchHash: constants.HashZero,
l2Transactions: [],
});
const parentBatchHash = await chain.lastFinalizedBatchHash();
it.skip("should succeed", async () => {
const batchHeader0 = new Uint8Array(89);
batchHeader0[25] = 1;
await chain.importGenesisBatch(batchHeader0, "0x0000000000000000000000000000000000000000000000000000000000000001");
const parentBatchHash = await chain.committedBatches(0);
console.log("genesis batch hash:", parentBatchHash);
console.log(`ChunkPerBatch`, `BlockPerChunk`, `TxPerBlock`, `BytesPerTx`, `TotalBytes`, `EstimateGas`);
for (let numChunks = 3; numChunks <= 6; ++numChunks) {
for (let numBlocks = 1; numBlocks <= 5; ++numBlocks) {
for (let numTx = 20; numTx <= Math.min(30, 100 / numBlocks); ++numTx) {
for (let txLength = 800; txLength <= 1000; txLength += 100) {
const txs: Array<Uint8Array> = [];
for (let i = 0; i < numTx; i++) {
const tx = new Uint8Array(4 + txLength);
let offset = 3;
for (let x = txLength; x > 0; x = Math.floor(x / 256)) {
tx[offset] = x % 256;
offset -= 1;
}
tx.fill(1, 4);
txs.push(tx);
}
const chunk = new Uint8Array(1 + 60 * numBlocks);
chunk[0] = numBlocks;
for (let i = 0; i < numBlocks; i++) {
chunk[1 + i * 60 + 57] = numTx;
}
const chunks: Array<Uint8Array> = [];
for (let i = 0; i < numChunks; i++) {
const txsInChunk: Array<Uint8Array> = [];
for (let j = 0; j < numBlocks; j++) {
txsInChunk.push(concat(txs));
}
chunks.push(concat([chunk, concat(txsInChunk)]));
}
for (let numTx = 1; numTx <= 25; ++numTx) {
for (let txLength = 100; txLength <= 1000; txLength += 100) {
const txs: Array<Uint8Array> = [];
for (let i = 0; i < numTx; i++) {
const tx = new Uint8Array(4 + txLength);
let offset = 3;
for (let x = txLength; x > 0; x = Math.floor(x / 256)) {
tx[offset] = x % 256;
offset -= 1;
const estimateGas = await chain.estimateGas.commitBatch(0, batchHeader0, chunks, "0x");
console.log(
`${numChunks}`,
`${numBlocks}`,
`${numTx}`,
`${txLength}`,
`${numChunks * numBlocks * numTx * (txLength + 1)}`,
`${estimateGas.toString()}`
);
}
tx.fill(1, 4);
txs.push(tx);
}
const batch = {
blocks: [
{
blockHash: "0xb5baa665b2664c3bfed7eb46e00ebc110ecf2ebd257854a9bf2b9dbc9b2c08f6",
parentHash: "0x92826bd3aad2ef70d8061dc4e25150b305d1233d9cd7579433a77d6eb01dae1c",
blockNumber: 1,
timestamp: numTx * 100000 + txLength,
baseFee: 0,
gasLimit: 0,
numTransactions: 0,
numL1Messages: 0,
},
],
prevStateRoot: "0x1b186a7a90ec3b41a2417062fe44dce8ce82ae76bfbb09eae786a4f1be1895f5",
newStateRoot: "0xb5baa665b2664c3bfed7eb46e00ebc110ecf2ebd257854a9bf2b9dbc9b2c08f6",
withdrawTrieRoot: "0xb5baa665b2664c3bfed7eb46e00ebc110ecf2ebd257854a9bf2b9dbc9b2c08f6",
batchIndex: 1,
parentBatchHash: parentBatchHash,
l2Transactions: concat(txs),
};
const estimateGas = await chain.estimateGas.commitBatch(batch);
const tx = await chain.commitBatch(batch, { gasLimit: estimateGas.mul(12).div(10) });
const receipt = await tx.wait();
console.log(
"Commit batch with l2TransactionsBytes:",
numTx * (txLength + 4),
"gasLimit:",
tx.gasLimit.toString(),
"estimateGas:",
estimateGas.toString(),
"gasUsed:",
receipt.gasUsed.toString()
);
}
}
});
*/
});

View File

@@ -55,7 +55,7 @@
"typescript": "^4.5.2"
},
"dependencies": {
"@openzeppelin/contracts": "^v4.9.2",
"@openzeppelin/contracts": "^v4.9.3",
"@openzeppelin/contracts-upgradeable": "^v4.9.2"
},
"lint-staged": {

View File

@@ -22,7 +22,7 @@ contract InitializeL1BridgeContracts is Script {
uint256 L1_DEPLOYER_PRIVATE_KEY = vm.envUint("L1_DEPLOYER_PRIVATE_KEY");
uint256 CHAIN_ID_L2 = vm.envUint("CHAIN_ID_L2");
uint256 MAX_L2_TX_IN_CHUNK = vm.envUint("MAX_L2_TX_IN_CHUNK");
uint256 MAX_TX_IN_CHUNK = vm.envUint("MAX_TX_IN_CHUNK");
uint256 MAX_L1_MESSAGE_GAS_LIMIT = vm.envUint("MAX_L1_MESSAGE_GAS_LIMIT");
address L1_COMMIT_SENDER_ADDRESS = vm.envAddress("L1_COMMIT_SENDER_ADDRESS");
address L1_FINALIZE_SENDER_ADDRESS = vm.envAddress("L1_FINALIZE_SENDER_ADDRESS");
@@ -67,7 +67,7 @@ contract InitializeL1BridgeContracts is Script {
ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).initialize(
L1_MESSAGE_QUEUE_PROXY_ADDR,
L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR,
MAX_L2_TX_IN_CHUNK
MAX_TX_IN_CHUNK
);
ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).addSequencer(L1_COMMIT_SENDER_ADDRESS);
ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).addProver(L1_FINALIZE_SENDER_ADDRESS);

View File

@@ -9,11 +9,12 @@ import {IL1ERC20Gateway} from "./IL1ERC20Gateway.sol";
import {IL1GatewayRouter} from "./IL1GatewayRouter.sol";
import {IL2ERC20Gateway} from "../../L2/gateways/IL2ERC20Gateway.sol";
import {IScrollMessenger} from "../../libraries/IScrollMessenger.sol";
import {ScrollConstants} from "../../libraries/constants/ScrollConstants.sol";
import {ScrollGatewayBase} from "../../libraries/gateway/ScrollGatewayBase.sol";
import {IMessageDropCallback} from "../../libraries/callbacks/IMessageDropCallback.sol";
/// @title L1ERC20Gateway
/// @notice The `L1ERC20Gateway` as a base contract for ERC20 gateways in L1.
/// It has implementation of common used functions for ERC20 gateways.
abstract contract L1ERC20Gateway is IL1ERC20Gateway, IMessageDropCallback, ScrollGatewayBase {
using SafeERC20Upgradeable for IERC20Upgradeable;

View File

@@ -2,9 +2,6 @@
pragma solidity =0.8.16;
import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
import {IERC20Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC20/IERC20Upgradeable.sol";
import {IFiatToken} from "../../../interfaces/IFiatToken.sol";
import {IUSDCBurnableSourceBridge} from "../../../interfaces/IUSDCBurnableSourceBridge.sol";
import {IL2ERC20Gateway} from "../../../L2/gateways/IL2ERC20Gateway.sol";

View File

@@ -72,6 +72,14 @@ interface IL1MessageQueue {
bytes calldata data
) external view returns (bytes32);
/// @notice Return whether the message is skipped.
/// @param queueIndex The queue index of the message to check.
function isMessageSkipped(uint256 queueIndex) external view returns (bool);
/// @notice Return whether the message is dropped.
/// @param queueIndex The queue index of the message to check.
function isMessageDropped(uint256 queueIndex) external view returns (bool);
/*****************************
* Public Mutating Functions *
*****************************/

View File

@@ -3,6 +3,7 @@
pragma solidity =0.8.16;
import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
import {BitMapsUpgradeable} from "@openzeppelin/contracts-upgradeable/utils/structs/BitMapsUpgradeable.sol";
import {IL2GasPriceOracle} from "./IL2GasPriceOracle.sol";
import {IL1MessageQueue} from "./IL1MessageQueue.sol";
@@ -17,6 +18,8 @@ import {AddressAliasHelper} from "../../libraries/common/AddressAliasHelper.sol"
/// @notice This contract will hold all L1 to L2 messages.
/// Each appended message is assigned with a unique and increasing `uint256` index.
contract L1MessageQueue is OwnableUpgradeable, IL1MessageQueue {
using BitMapsUpgradeable for BitMapsUpgradeable.BitMap;
/**********
* Events *
**********/
@@ -61,6 +64,12 @@ contract L1MessageQueue is OwnableUpgradeable, IL1MessageQueue {
/// @notice The max gas limit of L1 transactions.
uint256 public maxGasLimit;
/// @dev The bitmap for skipped messages.
BitMapsUpgradeable.BitMap private droppedMessageBitmap;
/// @dev The bitmap for skipped messages, where `skippedMessageBitmap[i]` keeps the bits from `[i*256, (i+1)*256)`.
mapping(uint256 => uint256) private skippedMessageBitmap;
/**********************
* Function Modifiers *
**********************/
@@ -256,6 +265,19 @@ contract L1MessageQueue is OwnableUpgradeable, IL1MessageQueue {
return hash;
}
/// @inheritdoc IL1MessageQueue
function isMessageSkipped(uint256 _queueIndex) external view returns (bool) {
if (_queueIndex >= pendingQueueIndex) return false;
return _isMessageSkipped(_queueIndex);
}
/// @inheritdoc IL1MessageQueue
function isMessageDropped(uint256 _queueIndex) external view returns (bool) {
// it should be a skipped message first.
return _isMessageSkipped(_queueIndex) && droppedMessageBitmap.get(_queueIndex);
}
/*****************************
* Public Mutating Functions *
*****************************/
@@ -305,10 +327,15 @@ contract L1MessageQueue is OwnableUpgradeable, IL1MessageQueue {
require(pendingQueueIndex == _startIndex, "start index mismatch");
unchecked {
for (uint256 i = 0; i < _count; i++) {
if ((_skippedBitmap >> i) & 1 == 0) {
messageQueue[_startIndex + i] = bytes32(0);
}
// clear extra bits in `_skippedBitmap`, and if _count = 256, it's designed to overflow.
uint256 mask = (1 << _count) - 1;
_skippedBitmap &= mask;
uint256 bucket = _startIndex >> 8;
uint256 offset = _startIndex & 0xff;
skippedMessageBitmap[bucket] |= _skippedBitmap << offset;
if (offset + _count > 256) {
skippedMessageBitmap[bucket + 1] = _skippedBitmap >> (256 - offset);
}
pendingQueueIndex = _startIndex + _count;
@@ -320,9 +347,10 @@ contract L1MessageQueue is OwnableUpgradeable, IL1MessageQueue {
/// @inheritdoc IL1MessageQueue
function dropCrossDomainMessage(uint256 _index) external onlyMessenger {
require(_index < pendingQueueIndex, "cannot drop pending message");
require(messageQueue[_index] != bytes32(0), "message already dropped or executed");
messageQueue[_index] = bytes32(0);
require(_isMessageSkipped(_index), "drop non-skipped message");
require(!droppedMessageBitmap.get(_index), "message already dropped");
droppedMessageBitmap.set(_index);
emit DropTransaction(_index);
}
@@ -393,4 +421,11 @@ contract L1MessageQueue is OwnableUpgradeable, IL1MessageQueue {
uint256 intrinsicGas = calculateIntrinsicGasFee(_calldata);
require(_gasLimit >= intrinsicGas, "Insufficient gas limit, must be above intrinsic gas");
}
/// @dev Returns whether the bit at `index` is set.
function _isMessageSkipped(uint256 index) internal view returns (bool) {
uint256 bucket = index >> 8;
uint256 mask = 1 << (index & 0xff);
return skippedMessageBitmap[bucket] & mask != 0;
}
}

View File

@@ -8,6 +8,8 @@ import {IWhitelist} from "../../libraries/common/IWhitelist.sol";
import {IL2GasPriceOracle} from "./IL2GasPriceOracle.sol";
// solhint-disable reason-string
contract L2GasPriceOracle is OwnableUpgradeable, IL2GasPriceOracle {
/**********
* Events *
@@ -41,9 +43,13 @@ contract L2GasPriceOracle is OwnableUpgradeable, IL2GasPriceOracle {
IWhitelist public whitelist;
struct IntrinsicParams {
// The intrinsic gas for transaction.
uint64 txGas;
// The intrinsic gas for contract creation. It is reserved for future use.
uint64 txGasContractCreation;
// The intrinsic gas for each zero byte.
uint64 zeroGas;
// The intrinsic gas for each nonzero byte.
uint64 nonZeroGas;
}
@@ -66,12 +72,7 @@ contract L2GasPriceOracle is OwnableUpgradeable, IL2GasPriceOracle {
) external initializer {
OwnableUpgradeable.__Ownable_init();
intrinsicParams = IntrinsicParams({
txGas: _txGas,
txGasContractCreation: _txGasContractCreation,
zeroGas: _zeroGas,
nonZeroGas: _nonZeroGas
});
_setIntrinsicParams(_txGas, _txGasContractCreation, _zeroGas, _nonZeroGas);
}
/*************************
@@ -117,17 +118,10 @@ contract L2GasPriceOracle is OwnableUpgradeable, IL2GasPriceOracle {
uint64 _txGasContractCreation,
uint64 _zeroGas,
uint64 _nonZeroGas
) public {
) external {
require(whitelist.isSenderAllowed(msg.sender), "Not whitelisted sender");
intrinsicParams = IntrinsicParams({
txGas: _txGas,
txGasContractCreation: _txGasContractCreation,
zeroGas: _zeroGas,
nonZeroGas: _nonZeroGas
});
emit IntrinsicParamsUpdated(_txGas, _txGasContractCreation, _zeroGas, _nonZeroGas);
_setIntrinsicParams(_txGas, _txGasContractCreation, _zeroGas, _nonZeroGas);
}
/// @notice Allows the owner to modify the l2 base fee.
@@ -154,4 +148,34 @@ contract L2GasPriceOracle is OwnableUpgradeable, IL2GasPriceOracle {
whitelist = IWhitelist(_newWhitelist);
emit UpdateWhitelist(_oldWhitelist, _newWhitelist);
}
/**********************
* Internal Functions *
**********************/
/// @dev Internal function to update parameters for intrinsic gas calculation.
/// @param _txGas The intrinsic gas for transaction.
/// @param _txGasContractCreation The intrinsic gas for contract creation.
/// @param _zeroGas The intrinsic gas for each zero byte.
/// @param _nonZeroGas The intrinsic gas for each nonzero byte.
function _setIntrinsicParams(
uint64 _txGas,
uint64 _txGasContractCreation,
uint64 _zeroGas,
uint64 _nonZeroGas
) internal {
require(_txGas > 0, "txGas is zero");
require(_zeroGas > 0, "zeroGas is zero");
require(_nonZeroGas > 0, "nonZeroGas is zero");
require(_txGasContractCreation > _txGas, "txGasContractCreation is less than txGas");
intrinsicParams = IntrinsicParams({
txGas: _txGas,
txGasContractCreation: _txGasContractCreation,
zeroGas: _zeroGas,
nonZeroGas: _nonZeroGas
});
emit IntrinsicParamsUpdated(_txGas, _txGasContractCreation, _zeroGas, _nonZeroGas);
}
}

View File

@@ -40,6 +40,9 @@ contract L2ScrollMessenger is ScrollMessengerBase, IL2ScrollMessenger {
/// @notice Mapping from L1 message hash to a boolean value indicating if the message has been successfully executed.
mapping(bytes32 => bool) public isL1MessageExecuted;
/// @dev The storage slots used by previous versions of this contract.
uint256[2] private __used;
/***************
* Constructor *
***************/

View File

@@ -12,7 +12,7 @@ import {IL1ERC20Gateway} from "../../../L1/gateways/IL1ERC20Gateway.sol";
import {IL2ScrollMessenger} from "../../IL2ScrollMessenger.sol";
import {IL2ERC20Gateway} from "../IL2ERC20Gateway.sol";
import {ScrollGatewayBase, IScrollGateway} from "../../../libraries/gateway/ScrollGatewayBase.sol";
import {ScrollGatewayBase} from "../../../libraries/gateway/ScrollGatewayBase.sol";
import {L2ERC20Gateway} from "../L2ERC20Gateway.sol";
/// @title L2USDCGateway
@@ -57,6 +57,10 @@ contract L2USDCGateway is L2ERC20Gateway, IUSDCDestinationBridge {
l2USDC = _l2USDC;
}
/// @notice Initializer for `L2USDCGateway`.
/// @param _counterpart The address of `L1USDCGateway` contract in L1.
/// @param _router The address of `L2GatewayRouter` contract in L2.
/// @param _messenger The address of `L2ScrollMessenger` contract in L2.
function initialize(
address _counterpart,
address _router,
@@ -158,6 +162,9 @@ contract L2USDCGateway is L2ERC20Gateway, IUSDCDestinationBridge {
}
require(_data.length == 0, "call is not allowed");
// rate limit
_addUsedAmount(_token, _amount);
// 2. Transfer token into this contract.
IERC20Upgradeable(_token).safeTransferFrom(_from, address(this), _amount);
IFiatToken(_token).burn(_amount);
@@ -169,7 +176,7 @@ contract L2USDCGateway is L2ERC20Gateway, IUSDCDestinationBridge {
(_l1USDC, _token, _from, _to, _amount, _data)
);
// 4. Send message to L1ScrollMessenger.
// 4. Send message to L2ScrollMessenger.
IL2ScrollMessenger(messenger).sendMessage{value: msg.value}(counterpart, 0, _message, _gasLimit);
emit WithdrawERC20(_l1USDC, _token, _from, _to, _amount, _data);

View File

@@ -104,6 +104,9 @@ contract GasSwap is ERC2771Context, ReentrancyGuard, OwnableBase {
_permit.s
);
// record token balance in this contract
uint256 _balance = IERC20(_permit.token).balanceOf(address(this));
// transfer token
IERC20(_permit.token).safeTransferFrom(_sender, address(this), _permit.value);
@@ -128,7 +131,7 @@ contract GasSwap is ERC2771Context, ReentrancyGuard, OwnableBase {
require(_success, "transfer ETH failed");
// refund rest token
uint256 _dust = IERC20(_permit.token).balanceOf(address(this));
uint256 _dust = IERC20(_permit.token).balanceOf(address(this)) - _balance;
if (_dust > 0) {
IERC20(_permit.token).safeTransfer(_sender, _dust);
}

View File

@@ -2,7 +2,11 @@
pragma solidity ^0.8.16;
/// @title IMessageTransmitter
/// @notice The interface of `MessageTransmitter` of Circle's Cross-Chain Transfer Protocol (CCTP).
interface IMessageTransmitter {
/// @notice Compute the nonce of a message.
/// @param _sourceAndNonce The bytes contains source and nonce.
function usedNonces(bytes32 _sourceAndNonce) external view returns (uint256);
/**

Some files were not shown because too many files have changed in this diff Show More