1 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| kautukkundan | e4e3309b30 | added eip-712 to verification gateway | 2022-05-26 10:50:00 +05:30 |
488 changed files with 20736 additions and 66744 deletions


@@ -1,47 +0,0 @@
name: Build & Upload Extension
description: Builds & uploads the extension for a browser to a GitHub release
inputs:
node-version:
description: 'NodeJS version to use for setup & build'
required: true
browser:
description: 'Which browser to build the extension for'
required: true
file-name:
description: 'The name of the browser asset to upload'
required: true
tag-name:
description: 'Tag name of the release. Commonly github.ref in an on.release workflow'
required: true
runs:
using: composite
steps:
- uses: actions/setup-node@v3
with:
node-version: ${{ inputs.node-version }}
cache: yarn
cache-dependency-path: extension/yarn.lock
- working-directory: ./extension
shell: bash
run: |
envsubst < config.release.json > config.json
yarn install --frozen-lockfile
- working-directory: ./extension
shell: bash
run: |
NETWORK_CONFIGS_DIR=../contracts/networks \
yarn build:${{ inputs.browser }}
- working-directory: ./extension
shell: bash
run: mv ./extension/${{ inputs.file-name }} ./extension/quill-${{ inputs.file-name }}
- uses: svenstaro/upload-release-action@v2
with:
tag: ${{ inputs.tag-name }}
# Note: This path is from repo root
# working-directory is not applied
file: ./extension/extension/quill-${{ inputs.file-name }}
overwrite: true


@@ -1,13 +0,0 @@
name: Local Aggregator Deploy
description: Runs an aggregator instance
runs:
using: composite
steps:
- working-directory: ./aggregator
shell: bash
run: cp .env.test .env
- working-directory: ./aggregator
shell: bash
run: deno run --allow-read --allow-write --allow-env --allow-net ./programs/aggregator.ts 2>&1 | tee -a aggregatorLogs.txt &


@@ -1,9 +0,0 @@
name: Local Contract Deploy
description: Runs a Hardhat node & deploys contracts
runs:
using: composite
steps:
- working-directory: ./contracts
shell: bash
run: yarn start &


@@ -1,13 +0,0 @@
name: Local Contract Deploy
description: Runs a Hardhat node & deploys contracts
runs:
using: composite
steps:
- working-directory: ./contracts
shell: bash
run: yarn hardhat node &
- working-directory: ./contracts
shell: bash
run: yarn hardhat run scripts/deploy_all.ts --network gethDev


@@ -1,24 +0,0 @@
name: Setup Contracts & Clients
description: Sets up contracts & clients
runs:
using: composite
steps:
- uses: actions/setup-node@v3
with:
node-version: 16.x
cache: yarn
cache-dependency-path: |
contracts/yarn.lock
contracts/clients/yarn.lock
- working-directory: ./contracts
shell: bash
run: |
cp .env.example .env
yarn install --frozen-lockfile
yarn build
- working-directory: ./contracts/clients
shell: bash
run: yarn install --frozen-lockfile

.github/labeler.yml (22 changes)

@@ -1,25 +1,9 @@
aggregator:
- aggregator/*
- aggregator/**/*
aggregator-proxy:
- aggregator-proxy/*
- aggregator-proxy/**/*
automation:
- .github/*
- .github/**/*
extension:
- extension/*
- extension/**/*
contracts:
- contracts/*
# Don't label client only changes.
- any: ['contracts/**/*', '!contracts/clients/**/*']
# Don't label client only changes.
- any: ['contracts/**/*', '!contracts/clients/**/*']
clients:
- 'contracts/clients/*'
- 'contracts/clients/**/*'
documentation:
- 'docs/*'
- 'docs/**/*'
- '*.md'
- '**/*.md'
- '**/**/*.md'
- contracts/clients/**/*


@@ -1,20 +0,0 @@
categories:
- title: 'aggregator'
label: 'aggregator'
- title: 'aggregator-proxy'
label: 'aggregator-proxy'
- title: 'contracts'
label: 'contracts'
- title: 'clients'
label: 'clients'
- title: 'docs'
label: 'documentation'
- title: 'extension'
label: 'extension'
version-resolver:
default: minor
prerelease: true
template: |
## What's Changed
$CHANGES


@@ -1,29 +0,0 @@
name: aggregator-dockerhub
on:
push:
branches:
- 'main'
paths:
- 'aggregator/**'
- '.github/workflows/aggregator-dockerhub.yml'
defaults:
run:
working-directory: ./aggregator
env:
DENO_VERSION: 1.x
jobs:
push:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: denoland/setup-deno@v1
with:
deno-version: ${{ env.DENO_VERSION }}
- run: git show HEAD
- run: echo ${{ secrets.DOCKERHUB_TOKEN }} | docker login --username blswalletghactions --password-stdin
- run: ./programs/build.ts --image-name blswallet/aggregator --image-only --also-tag-latest --push


@@ -1,32 +0,0 @@
name: aggregator-proxy
on:
push:
branches:
- 'main'
paths:
- 'aggregator-proxy/**'
pull_request:
paths:
- 'aggregator-proxy/**'
defaults:
run:
working-directory: ./aggregator-proxy
env:
NODEJS_VERSION: 16.x
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODEJS_VERSION }}
cache: yarn
cache-dependency-path: aggregator-proxy/yarn.lock
- run: yarn install --frozen-lockfile
- run: yarn build


@@ -1,80 +0,0 @@
name: aggregator
on:
push:
branches:
- 'main'
paths:
- 'aggregator/**'
# Check for breaking changes from contracts
- 'contracts/**'
- '.github/workflows/aggregator.yml'
pull_request:
paths:
- 'aggregator/**'
# Check for breaking changes from contracts
- 'contracts/**'
- '.github/workflows/aggregator.yml'
branches-ignore:
# Changes targeting this branch should be tested+fixed when being merged
# into main
- contract-updates
defaults:
run:
working-directory: ./aggregator
env:
DENO_VERSION: 1.x
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: denoland/setup-deno@v1
with:
deno-version: ${{ env.DENO_VERSION }}
- run: deno lint .
todos-fixmes:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: denoland/setup-deno@v1
with:
deno-version: ${{ env.DENO_VERSION }}
- run: ./programs/lintTodos.ts
typescript:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: denoland/setup-deno@v1
with:
deno-version: ${{ env.DENO_VERSION }}
- run: ./programs/checkTs.ts
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: denoland/setup-deno@v1
with:
deno-version: ${{ env.DENO_VERSION }}
- uses: ./.github/actions/setup-contracts-clients
# Setup node & contracts
- working-directory: ./contracts
run: yarn start &
- working-directory: ./contracts
run: ./scripts/wait-for-rpc.sh
- working-directory: ./contracts
run: ./scripts/wait-for-contract-deploy.sh
- run: cp .env.local.example .env
- run: deno test --allow-net --allow-env --allow-read


@@ -1,28 +0,0 @@
name: clients
on:
push:
branches:
- 'main'
paths:
- 'contracts/clients/**'
pull_request:
paths:
- 'contracts/clients/**'
defaults:
run:
working-directory: ./contracts/clients
env:
DENO_VERSION: 1.x
jobs:
test-unit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-contracts-clients
- run: yarn test


@@ -1,45 +0,0 @@
name: contracts
on:
push:
branches:
- 'main'
paths:
- 'contracts/**'
- '!contracts/clients/**'
pull_request:
paths:
- 'contracts/**'
- '!contracts/clients/**'
defaults:
run:
working-directory: ./contracts
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-contracts-clients
- run: yarn lint
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-contracts-clients
- run: yarn test
# ensure gas measurement script runs
test-gas-measurements:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-contracts-clients
- uses: ./.github/actions/local-contract-deploy-hardhat
- run: yarn hardhat run ./scripts/measure_gas/run.ts --network gethDev


@@ -1,52 +0,0 @@
name: extension-release
on:
release:
types: [published]
defaults:
run:
working-directory: ./extension
env:
NODEJS_VERSION: 16.x
jobs:
chrome:
runs-on: ubuntu-latest
environment: extension-release
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/build-upload-extension
with:
node-version: ${{ env.NODEJS_VERSION }}
browser: chrome
file-name: chrome.zip
tag-name: ${{ github.ref }}
firefox:
runs-on: ubuntu-latest
environment: extension-release
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/build-upload-extension
with:
node-version: ${{ env.NODEJS_VERSION }}
browser: firefox
file-name: firefox.xpi
tag-name: ${{ github.ref }}
opera:
runs-on: ubuntu-latest
environment: extension-release
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/build-upload-extension
with:
node-version: ${{ env.NODEJS_VERSION }}
browser: opera
file-name: opera.crx
tag-name: ${{ github.ref }}


@@ -1,47 +0,0 @@
name: extension
on:
push:
branches:
- 'main'
paths:
- 'extension/**'
pull_request:
paths:
- 'extension/**'
defaults:
run:
working-directory: ./extension
env:
NODEJS_VERSION: 16.x
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODEJS_VERSION }}
cache: yarn
cache-dependency-path: extension/yarn.lock
- run: yarn install --frozen-lockfile
- run: yarn lint
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODEJS_VERSION }}
cache: yarn
cache-dependency-path: extension/yarn.lock
- run: cp config.example.json config.json
- run: yarn install --frozen-lockfile
# For now, just check that chrome builds
- run: yarn build:chrome


@@ -1,69 +0,0 @@
name: integration
on:
push:
branches:
- 'main'
paths:
- 'aggregator/**'
# Check for breaking changes from contracts
- 'contracts/**'
- '.github/workflows/integration.yml'
pull_request:
paths:
- 'aggregator/**'
# Check for breaking changes from contracts
- 'contracts/**'
- '.github/workflows/integration.yml'
branches-ignore:
# Changes targeting this branch should be tested+fixed when being merged
# into main
- contract-updates
defaults:
run:
working-directory: ./contracts/clients
env:
DENO_VERSION: 1.x
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-contracts-clients
- working-directory: ./contracts/clients
run: yarn build
test-integration:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-contracts-clients
- uses: denoland/setup-deno@v1
with:
deno-version: ${{ env.DENO_VERSION }}
# - name: run geth node and deploy contracts
- uses: ./.github/actions/local-contract-deploy-geth
- working-directory: ./contracts
run: ./scripts/wait-for-contract-deploy.sh
# - name: run aggregator
- uses: ./.github/actions/local-aggregator-deploy
# - name: integration tests
- working-directory: ./contracts
run: yarn test-integration
# - name: upload artifacts
- uses: actions/upload-artifact@v3
if: always()
with:
name: aggregator-logs
path: ./aggregator/aggregatorLogs.txt
retention-days: 5


@@ -6,6 +6,6 @@ jobs:
triage:
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@v4
- uses: actions/labeler@main
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"


@@ -1,24 +0,0 @@
name: Release Drafter
on:
push:
branches:
- main
permissions:
contents: read
jobs:
update_release_draft:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: release-drafter/release-drafter@v5
with:
config-name: release-drafter.yml
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.gitignore (1 change)

@@ -1,3 +1,2 @@
.data
.DS_Store
.idea

.nvmrc (1 change)

@@ -1 +0,0 @@
lts/*


@@ -1,61 +0,0 @@
# Contribute to BLS Wallet
Thanks for taking the time to contribute to BLS Wallet!
In this guide you will get an overview of the contribution workflow, from opening an issue and creating a PR to reviewing and merging it.
## Getting started
To get an overview of the project, see [System Overview](docs/system_overview.md).
To set up the repo for local use, see [Local Development](docs/local_development.md).
## Issues
### Create a new issue
First search for an [existing issue](https://github.com/web3well/bls-wallet/issues). If you find one, add any new insight, helpful context, or some reactions. Otherwise, you can [open a new issue](https://github.com/web3well/bls-wallet/issues/new). Be sure to label it with anything relevant.
### Solve an issue
Search for an [existing issue](https://github.com/web3well/bls-wallet/issues) that is unassigned and interests you. If this is your first time contributing, you may want to choose a [good first issue](https://github.com/web3well/bls-wallet/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
## Make Changes
1. [Fork the repo](https://github.com/web3well/bls-wallet/fork)
2. Checkout a new branch
3. Make your changes
### Quality Checks
- Add or update test cases for new features and bug fixes to ensure that your changes work properly and will not be broken by future changes.
- Type checking and code linting should all pass.
- For ambiguous TypeScript typing, prefer `unknown` over `any` (see the sketch below).
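A minimal sketch of the difference (the `parseAny`/`parseUnknown` helpers are hypothetical, written only for this illustration):
```ts
// `any` silently disables type checking: this compiles but throws at runtime.
function parseAny(text: string): any {
  return JSON.parse(text);
}
parseAny("{}").toUpperCase();

// `unknown` forces callers to narrow the type before using the value.
function parseUnknown(text: string): unknown {
  return JSON.parse(text);
}
const value = parseUnknown('"hello"');
if (typeof value === "string") {
  console.log(value.toUpperCase()); // safe: narrowed to string
}
```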
## Commit your update
Commit your changes over one or more commits. It is recommended to format your commit messages as follows:
```
A short summary of what you did
A list or paragraph of more specific details
```
## Pull Request
Create a pull request (PR) from your fork's branch to `main`, filling in the description template, including [linking to the issue you are resolving](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue). Feel free to open a draft PR while you are actively working.
Once ready, a BLS Wallet team member will review the PR.
- When run, all GitHub Actions workflows should succeed.
- All TODO/FIXME comments in code should be resolved, unless marked `merge-ok` with a description/issue link describing how they can be resolved in future work.
- The author of a comment may mark it as [resolved](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/commenting-on-a-pull-request#resolving-conversations) when they are satisfied with a requested change or answer to a question. You are not required to resolve all comments as some may provide good historical information.
## Your PR is merged!
Thanks for your hard work! Accept our heartfelt gratitude and revel in your masterful coding and/or documentation skills.
### Thanks
To [github/docs CONTRIBUTING.md](https://github.com/github/docs/blob/main/CONTRIBUTING.md) for being a great contribution template.

LICENSE (21 changes)

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021 BLS Wallet
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md (191 changes)

@@ -1,58 +1,173 @@
![BLS Wallet](./docs/images/bls-github-banner.svg)
# bls-wallet
## What is BLS Wallet?
An Ethereum Layer 2 smart contract wallet that uses [BLS signatures](https://en.wikipedia.org/wiki/BLS_digital_signature) and aggregated transactions to reduce gas costs.
A set of components to bring lower gas costs to EVM rollups via aggregated [BLS signatures](https://en.wikipedia.org/wiki/BLS_digital_signature). Our smart contract wallet supports recovery, atomic multi-action operations, sponsored transactions and user-controlled upgradability.
You can watch a full end-to-end demo of the project [here](https://www.youtube.com/watch?v=MOQ3sCLP56g).
## Getting Started
- See an [overview](./docs/system_overview.md) of BLS Wallet & how the components work together.
- Use BLS Wallet in [a browser/NodeJS/Deno app](./docs/use_bls_wallet_clients.md).
- Use BLS Wallet in [your L2 dApp](./docs/use_bls_wallet_dapp.md) for cheaper, multi action transactions.
- Use BLS Wallet components and features with an [ethers.js provider and signer](./use_bls_provider.md)
### Setup your development environment
- [Local development](./docs/local_development.md)
- [Remote development](./docs/remote_development.md)
You can watch a full end-to-end demo of the project [here](https://www.youtube.com/watch?v=MOQ3sCLP56g)
## Components
[contracts](./contracts/)
See each component's directory `README` for more details.
Solidity smart contracts for wallets, BLS signature verification, and deployment/testing tools.
![System Overview](images/system-overview.svg)
[aggregator](./aggregator/)
### Aggregator
Service which accepts BLS signed transactions and bundles them into one for submission.
Service which aggregates BLS wallet transactions.
[aggregator-proxy](./aggregator-proxy/)
### Clients
npm package for proxying to another aggregator instance.
TS/JS Client libraries for web apps and services.
[bls-wallet-clients](./contracts/clients/)
### Contracts
npm package which provides easy to use constructs to interact with the contracts and aggregator.
`bls-wallet` Solidity contracts.
[extension](./extension/)
### Extension
Prototype browser extension used to manage BLS Wallets and sign transactions.
Quill browser extension used to manage BLS Wallets and sign transactions.
## Contract Deployments
### Signer
See the [networks directory](./contracts/networks/) for a list of all contract deployment (network) manifests. Have an L2/rollup testnet you'd like BLS Wallet deployed on? [Open an issue](https://github.com/web3well/bls-wallet/issues/new) or [Deploy it yourself](./docs/remote_development.md)
TS/JS BLS Signing lib.
- [Arbitrum Goerli](./contracts/networks/arbitrum-goerli.json)
- [Optimism Goerli](./contracts/networks/optimism-goerli.json)
## Dependencies
## Ways to Contribute
### Required
- [Work on an open issue](https://github.com/web3well/bls-wallet/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
- [Use BLS Wallet](./docs/use_bls_wallet_clients.md) in your project and [share it with us](https://github.com/web3well/bls-wallet/discussions)
- [Report a bug or request a feature](https://github.com/web3well/bls-wallet/issues/new)
- [Ask a question or answer an existing one](https://github.com/web3well/bls-wallet/discussions)
- [Try or add to our documentation](https://github.com/web3well/bls-wallet/tree/main/docs)
- [NodeJS](https://nodejs.org)
- [Yarn](https://yarnpkg.com/getting-started/install) (`npm install -g yarn`)
- [Deno](https://deno.land/#installation)
See our [contribution instructions & guidelines](./CONTRIBUTING.md) for more details.
### Optional (Recommended)
- [nvm](https://github.com/nvm-sh/nvm#installing-and-updating)
- [docker-compose](https://docs.docker.com/compose/install/)
- [MetaMask](https://metamask.io/)
## Setup
Run the repo setup script
```sh
./setup.ts
```
Then choose to target either a local Hardhat node or the Arbitrum Testnet.
### Local
Start a local Hardhat node for RPC use.
```sh
cd ./contracts
yarn hardhat node
```
You can use any two of the private keys displayed (PK0 & PK1) to update these values in `./aggregator/.env`.
```
...
PRIVATE_KEY_AGG=PK0
PRIVATE_KEY_ADMIN=PK1
...
```
Set this value in `./contracts/.env` (this mnemonic is special to Hardhat and has funds).
```
...
DEPLOYER_MNEMONIC="test test test test test test test test test test test junk"
...
```
Deploy the PrecompileCostEstimator contract.
```sh
yarn hardhat run scripts/0_deploy_precompile_cost_estimator.ts --network gethDev
```
Copy the address that is output.
Update `COST_ESTIMATOR_ADDRESS` in `./contracts/contracts/lib/hubble-contracts/contracts/libs/BLS.sol` to that address:
```solidity
...
address private constant COST_ESTIMATOR_ADDRESS = 0x57047C275bbCb44D85DFA50AD562bA968EEba95A;
...
```
Deploy all remaining `bls-wallet` contracts.
```sh
yarn hardhat run scripts/deploy_all.ts --network gethDev
```
### Arbitrum Testnet (Rinkeby Arbitrum Testnet)
You will need two ETH addresses with Rinkeby ETH and their private keys (PK0 & PK1) for running the aggregator. It is NOT recommended that you use any primary wallets with ETH Mainnet assets.
You can get Rinkeby ETH at https://app.mycrypto.com/faucet, and transfer it into the Arbitrum testnet via https://bridge.arbitrum.io/. Make sure when doing so that your network is set to Rinkeby in MetaMask.
Update these values in `./aggregator/.env`.
```
RPC_URL=https://rinkeby.arbitrum.io/rpc
...
NETWORK_CONFIG_PATH=../contracts/networks/arbitrum-testnet.json
PRIVATE_KEY_AGG=PK0
PRIVATE_KEY_ADMIN=PK1
...
```
And then update this value in `./extension/.env`.
```
...
DEFAULT_CHAIN_ID=421611
...
```
## Run
```sh
docker-compose up -d postgres # Or see local postgres instructions in ./aggregator/README.md#PostgreSQL
cd ./aggregator
./programs/aggregator.ts
```
In a separate terminal/shell instance:
```sh
cd ./extension
yarn run dev:chrome # or dev:firefox, dev:opera
```
### Chrome
1. Go to Chrome's [extension page](chrome://extensions).
2. Enable `Developer mode`.
3. Either click `Load unpacked extension...` and select `./extension/extension/chrome` or drag that folder into the page.
### Firefox
1. Go to Firefox's [debugging page](about:debugging#/runtime/this-firefox).
2. Click `Load Temporary Add-on...`.
3. Select `./extension/extension/firefox/manifest.json`.
## Testing/using updates to ./clients
### extension
```sh
cd ./contracts/clients
yarn build
yarn link
cd ../extension
yarn link bls-wallet-clients
```
### aggregator
You will need to push up an `@experimental` version to 'bls-wallet-clients' on npm and update the version in `./aggregator/src/deps.ts` until a local linking solution for Deno is found. See https://github.com/alephjs/esm.sh/discussions/216 for details.
In `./contracts/clients` with your changes:
```
yarn publish-experimental
```
Note the `x.y.z-abc1234` version that was output.
Then in `./aggregator/deps.ts`, change all `from` references for that package.
```typescript
...
} from "https://esm.sh/bls-wallet-clients@x.y.z-abc1234";
...
```


@@ -1,16 +1,5 @@
# Aggregator Proxy
[![npm version](https://img.shields.io/npm/v/bls-wallet-aggregator-proxy)](https://www.npmjs.com/package/bls-wallet-aggregator-proxy)
This package makes it easy to provide an aggregator by proxying another. The primary use-case is to expose a free aggregator based on one that requires payment by augmenting the bundles with transactions that pay `tx.origin`.
## Setup
```sh
npm install bls-wallet-aggregator-proxy
yarn install bls-wallet-aggregator-proxy
```
## Usage
```ts
@@ -20,32 +9,20 @@ import {
// AggregatorProxyCallback,
// ^ Alternatively, for manual control, import AggregatorProxyCallback to
// just generate the req,res callback for use with http.createServer
} from "bls-wallet-aggregator-proxy";
} from 'aggregator-proxy';
runAggregatorProxy(
"https://arbitrum-goerli.blswallet.org",
async (bundle) => {
console.log("proxying bundle", JSON.stringify(bundle, null, 2));
'https://arbitrum-testnet.blswallet.org',
async bundle => {
console.log('proxying bundle', JSON.stringify(bundle, null, 2));
// Return a different/augmented bundle to send to the upstream aggregator
return bundle;
},
8080,
"0.0.0.0",
'0.0.0.0',
() => {
console.log("Proxying aggregator on port 8080");
}
console.log('Proxying aggregator on port 8080');
},
);
```
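As a rough sketch of the bundle-augmentation use case described above, the callback can merge the incoming bundle with a locally built operation that pays `tx.origin` before forwarding it upstream. The `makePaymentBundle` and `aggregateBundles` helpers here are hypothetical placeholders, not exports of this package:
```ts
import { runAggregatorProxy } from "bls-wallet-aggregator-proxy";
// Hypothetical helpers assumed to be implemented elsewhere in your project:
// one builds a bundle whose single operation pays tx.origin, the other merges
// two bundles (including aggregating their BLS signatures).
import { makePaymentBundle, aggregateBundles } from "./paymentBundle";

runAggregatorProxy(
  "https://arbitrum-goerli.blswallet.org",
  async (clientBundle) => {
    // Attach the payment operation so the upstream (paid) aggregator is
    // compensated while this proxy stays free for its users.
    return aggregateBundles(clientBundle, await makePaymentBundle());
  },
  8080,
  "0.0.0.0",
  () => {
    console.log("Proxying augmented bundles on port 8080");
  },
);
```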
## Instant wallet example without dapp-sponsored transaction
![Instant wallet without dapp-sponsored transactions](./../docs/images/system-overview/instant-wallet-without-dapp-sponsored-txs.jpg)
## Instant wallet example with dapp-sponsored transaction
![Instant wallet with dapp-sponsored transaction](./../docs/images/system-overview/instant-wallet-with-dapp-sponsored-txs.jpg)
## Example dApp using a proxy aggregator
- https://github.com/JohnGuilding/single-pool-dex


@@ -1,7 +1,7 @@
import { runAggregatorProxy } from "../src";
runAggregatorProxy(
'https://arbitrum-goerli.blswallet.org',
'https://arbitrum-testnet.blswallet.org',
async b => {
console.log('proxying bundle', JSON.stringify(b, null, 2));
return b;


@@ -2,14 +2,10 @@
"name": "bls-wallet-aggregator-proxy",
"version": "0.1.1",
"main": "dist/src/index.js",
"repository": "https://github.com/web3well/bls-wallet/aggregator-proxy",
"repository": "https://github.com/web3well/bls-wallet",
"author": "Andrew Morris",
"license": "MIT",
"private": false,
"engines": {
"node": ">=16.0.0",
"yarn": ">=1.0.0"
},
"scripts": {
"build": "rm -rf dist && tsc"
},
@@ -21,7 +17,7 @@
"@types/koa__cors": "^3.3.0",
"@types/koa__router": "^8.0.11",
"@types/node-fetch": "^2.6.1",
"bls-wallet-clients": "0.9.0-405e23a",
"bls-wallet-clients": "^0.6.0",
"fp-ts": "^2.12.1",
"io-ts": "^2.2.16",
"io-ts-reporters": "^2.0.1",


@@ -9,7 +9,6 @@ const BundleDto = io.type({
),
operations: io.array(io.type({
nonce: io.string,
gas: io.string,
actions: io.array(io.type({
ethValue: io.string,
contractAddress: io.string,


@@ -2,7 +2,22 @@
# yarn lockfile v1
"@ethersproject/abi@5.6.1", "@ethersproject/abi@^5.6.0":
"@ethersproject/abi@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.5.0.tgz#fb52820e22e50b854ff15ce1647cc508d6660613"
integrity sha512-loW7I4AohP5KycATvc0MgujU6JyCHPqHdeoo9z3Nr9xEiNioxa65ccdm1+fsoJhkuhdRtfcL8cfyGamz2AxZ5w==
dependencies:
"@ethersproject/address" "^5.5.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/constants" "^5.5.0"
"@ethersproject/hash" "^5.5.0"
"@ethersproject/keccak256" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/abi@5.6.1", "@ethersproject/abi@^5.5.0", "@ethersproject/abi@^5.6.0":
version "5.6.1"
resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.6.1.tgz#f7de888edeb56b0a657b672bdd1b3a1135cd14f7"
integrity sha512-0cqssYh6FXjlwKWBmLm3+zH2BNARoS5u/hxbz+LpQmcDB3w0W553h2btWui1/uZp2GBM/SI3KniTuMcYyHpA5w==
@@ -17,22 +32,20 @@
"@ethersproject/properties" "^5.6.0"
"@ethersproject/strings" "^5.6.0"
"@ethersproject/abi@5.7.0", "@ethersproject/abi@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.7.0.tgz#b3f3e045bbbeed1af3947335c247ad625a44e449"
integrity sha512-351ktp42TiRcYB3H1OP8yajPeAQstMW/yCFokj/AthP9bLHzQFPlOrxOcwYEDkUAICmOHljvN4K39OMTMUa9RA==
"@ethersproject/abstract-provider@5.5.1":
version "5.5.1"
resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.5.1.tgz#2f1f6e8a3ab7d378d8ad0b5718460f85649710c5"
integrity sha512-m+MA/ful6eKbxpr99xUYeRvLkfnlqzrF8SZ46d/xFB1A7ZVknYc/sXJG0RcufF52Qn2jeFj1hhcoQ7IXjNKUqg==
dependencies:
"@ethersproject/address" "^5.7.0"
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/constants" "^5.7.0"
"@ethersproject/hash" "^5.7.0"
"@ethersproject/keccak256" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/strings" "^5.7.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/networks" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/transactions" "^5.5.0"
"@ethersproject/web" "^5.5.0"
"@ethersproject/abstract-provider@5.6.0", "@ethersproject/abstract-provider@^5.6.0":
"@ethersproject/abstract-provider@5.6.0", "@ethersproject/abstract-provider@^5.5.0", "@ethersproject/abstract-provider@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.6.0.tgz#0c4ac7054650dbd9c476cf5907f588bbb6ef3061"
integrity sha512-oPMFlKLN+g+y7a79cLK3WiLcjWFnZQtXWgnLAbHZcN3s7L4v90UHpTOrLk+m3yr0gt+/h9STTM6zrr7PM8uoRw==
@@ -45,20 +58,18 @@
"@ethersproject/transactions" "^5.6.0"
"@ethersproject/web" "^5.6.0"
"@ethersproject/abstract-provider@5.7.0", "@ethersproject/abstract-provider@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.7.0.tgz#b0a8550f88b6bf9d51f90e4795d48294630cb9ef"
integrity sha512-R41c9UkchKCpAqStMYUpdunjo3pkEvZC3FAwZn5S5MGbXoMQOHIdHItezTETxAO5bevtMApSyEhn9+CHcDsWBw==
"@ethersproject/abstract-signer@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.5.0.tgz#590ff6693370c60ae376bf1c7ada59eb2a8dd08d"
integrity sha512-lj//7r250MXVLKI7sVarXAbZXbv9P50lgmJQGr2/is82EwEb8r7HrxsmMqAjTsztMYy7ohrIhGMIml+Gx4D3mA==
dependencies:
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/networks" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/transactions" "^5.7.0"
"@ethersproject/web" "^5.7.0"
"@ethersproject/abstract-provider" "^5.5.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/abstract-signer@5.6.0", "@ethersproject/abstract-signer@^5.6.0":
"@ethersproject/abstract-signer@5.6.0", "@ethersproject/abstract-signer@^5.5.0", "@ethersproject/abstract-signer@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.6.0.tgz#9cd7ae9211c2b123a3b29bf47aab17d4d016e3e7"
integrity sha512-WOqnG0NJKtI8n0wWZPReHtaLkDByPL67tn4nBaDAhmVq8sjHTPbCdz4DRhVu/cfTOvfy9w3iq5QZ7BX7zw56BQ==
@@ -69,18 +80,18 @@
"@ethersproject/logger" "^5.6.0"
"@ethersproject/properties" "^5.6.0"
"@ethersproject/abstract-signer@5.7.0", "@ethersproject/abstract-signer@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.7.0.tgz#13f4f32117868452191a4649723cb086d2b596b2"
integrity sha512-a16V8bq1/Cz+TGCkE2OPMTOUDLS3grCpdjoJCYNnVBbdYEMSgKrU0+B90s8b6H+ByYTBZN7a3g76jdIJi7UfKQ==
"@ethersproject/address@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.5.0.tgz#bcc6f576a553f21f3dd7ba17248f81b473c9c78f"
integrity sha512-l4Nj0eWlTUh6ro5IbPTgbpT4wRbdH5l8CQf7icF7sb/SI3Nhd9Y9HzhonTSTi6CefI0necIw7LJqQPopPLZyWw==
dependencies:
"@ethersproject/abstract-provider" "^5.7.0"
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/keccak256" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/rlp" "^5.5.0"
"@ethersproject/address@5.6.0", "@ethersproject/address@^5.6.0":
"@ethersproject/address@5.6.0", "@ethersproject/address@^5.5.0", "@ethersproject/address@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.6.0.tgz#13c49836d73e7885fc148ad633afad729da25012"
integrity sha512-6nvhYXjbXsHPS+30sHZ+U4VMagFC/9zAk6Gd/h3S21YW4+yfb0WfRtaAIZ4kfM4rrVwqiy284LP0GtL5HXGLxQ==
@@ -91,32 +102,29 @@
"@ethersproject/logger" "^5.6.0"
"@ethersproject/rlp" "^5.6.0"
"@ethersproject/address@5.7.0", "@ethersproject/address@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.7.0.tgz#19b56c4d74a3b0a46bfdbb6cfcc0a153fc697f37"
integrity sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA==
"@ethersproject/base64@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.5.0.tgz#881e8544e47ed976930836986e5eb8fab259c090"
integrity sha512-tdayUKhU1ljrlHzEWbStXazDpsx4eg1dBXUSI6+mHlYklOXoXF6lZvw8tnD6oVaWfnMxAgRSKROg3cVKtCcppA==
dependencies:
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/keccak256" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/rlp" "^5.7.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/base64@5.6.0", "@ethersproject/base64@^5.6.0":
"@ethersproject/base64@5.6.0", "@ethersproject/base64@^5.5.0", "@ethersproject/base64@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.6.0.tgz#a12c4da2a6fb86d88563216b0282308fc15907c9"
integrity sha512-2Neq8wxJ9xHxCF9TUgmKeSh9BXJ6OAxWfeGWvbauPh8FuHEjamgHilllx8KkSd5ErxyHIX7Xv3Fkcud2kY9ezw==
dependencies:
"@ethersproject/bytes" "^5.6.0"
"@ethersproject/base64@5.7.0", "@ethersproject/base64@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.7.0.tgz#ac4ee92aa36c1628173e221d0d01f53692059e1c"
integrity sha512-Dr8tcHt2mEbsZr/mwTPIQAf3Ai0Bks/7gTw9dSqk1mQvhW3XvRlmDJr/4n+wg1JmCl16NZue17CDh8xb/vZ0sQ==
"@ethersproject/basex@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.5.0.tgz#e40a53ae6d6b09ab4d977bd037010d4bed21b4d3"
integrity sha512-ZIodwhHpVJ0Y3hUCfUucmxKsWQA5TMnavp5j/UOuDdzZWzJlRmuOjcTMIGgHCYuZmHt36BfiSyQPSRskPxbfaQ==
dependencies:
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/basex@5.6.0", "@ethersproject/basex@^5.6.0":
"@ethersproject/basex@5.6.0", "@ethersproject/basex@^5.5.0", "@ethersproject/basex@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.6.0.tgz#9ea7209bf0a1c3ddc2a90f180c3a7f0d7d2e8a69"
integrity sha512-qN4T+hQd/Md32MoJpc69rOwLYRUXwjTlhHDIeUkUmiN/JyWkkLLMoG0TqvSQKNqZOMgN5stbUYN6ILC+eD7MEQ==
@@ -124,15 +132,16 @@
"@ethersproject/bytes" "^5.6.0"
"@ethersproject/properties" "^5.6.0"
"@ethersproject/basex@5.7.0", "@ethersproject/basex@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.7.0.tgz#97034dc7e8938a8ca943ab20f8a5e492ece4020b"
integrity sha512-ywlh43GwZLv2Voc2gQVTKBoVQ1mti3d8HK5aMxsfu/nRDnMmNqaSJ3r3n85HBByT8OpoY96SXM1FogC533T4zw==
"@ethersproject/bignumber@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.5.0.tgz#875b143f04a216f4f8b96245bde942d42d279527"
integrity sha512-6Xytlwvy6Rn3U3gKEc1vP7nR92frHkv6wtVr95LFR3jREXiCPzdWxKQ1cx4JGQBXxcguAwjA8murlYN2TSiEbg==
dependencies:
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
bn.js "^4.11.9"
"@ethersproject/bignumber@5.6.0", "@ethersproject/bignumber@^5.6.0":
"@ethersproject/bignumber@5.6.0", "@ethersproject/bignumber@^5.5.0", "@ethersproject/bignumber@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.6.0.tgz#116c81b075c57fa765a8f3822648cf718a8a0e26"
integrity sha512-VziMaXIUHQlHJmkv1dlcd6GY2PmT0khtAqaMctCIDogxkrarMzA9L94KN1NeXqqOfFD6r0sJT3vCTOFSmZ07DA==
@@ -141,42 +150,49 @@
"@ethersproject/logger" "^5.6.0"
bn.js "^4.11.9"
"@ethersproject/bignumber@5.7.0", "@ethersproject/bignumber@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.7.0.tgz#e2f03837f268ba655ffba03a57853e18a18dc9c2"
integrity sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw==
"@ethersproject/bytes@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.5.0.tgz#cb11c526de657e7b45d2e0f0246fb3b9d29a601c"
integrity sha512-ABvc7BHWhZU9PNM/tANm/Qx4ostPGadAuQzWTr3doklZOhDlmcBqclrQe/ZXUIj3K8wC28oYeuRa+A37tX9kog==
dependencies:
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
bn.js "^5.2.1"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/bytes@5.6.1", "@ethersproject/bytes@^5.6.0":
"@ethersproject/bytes@5.6.1", "@ethersproject/bytes@^5.5.0", "@ethersproject/bytes@^5.6.0":
version "5.6.1"
resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.6.1.tgz#24f916e411f82a8a60412344bf4a813b917eefe7"
integrity sha512-NwQt7cKn5+ZE4uDn+X5RAXLp46E1chXoaMmrxAyA0rblpxz8t58lVkrHXoRIn0lz1joQElQ8410GqhTqMOwc6g==
dependencies:
"@ethersproject/logger" "^5.6.0"
"@ethersproject/bytes@5.7.0", "@ethersproject/bytes@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.7.0.tgz#a00f6ea8d7e7534d6d87f47188af1148d71f155d"
integrity sha512-nsbxwgFXWh9NyYWo+U8atvmMsSdKJprTcICAkvbBffT75qDocbuggBU0SJiVK2MuTrp0q+xvLkTnGMPK1+uA9A==
"@ethersproject/constants@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.5.0.tgz#d2a2cd7d94bd1d58377d1d66c4f53c9be4d0a45e"
integrity sha512-2MsRRVChkvMWR+GyMGY4N1sAX9Mt3J9KykCsgUFd/1mwS0UH1qw+Bv9k1UJb3X3YJYFco9H20pjSlOIfCG5HYQ==
dependencies:
"@ethersproject/logger" "^5.7.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/constants@5.6.0", "@ethersproject/constants@^5.6.0":
"@ethersproject/constants@5.6.0", "@ethersproject/constants@^5.5.0", "@ethersproject/constants@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.6.0.tgz#55e3eb0918584d3acc0688e9958b0cedef297088"
integrity sha512-SrdaJx2bK0WQl23nSpV/b1aq293Lh0sUaZT/yYKPDKn4tlAbkH96SPJwIhwSwTsoQQZxuh1jnqsKwyymoiBdWA==
dependencies:
"@ethersproject/bignumber" "^5.6.0"
"@ethersproject/constants@5.7.0", "@ethersproject/constants@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.7.0.tgz#df80a9705a7e08984161f09014ea012d1c75295e"
integrity sha512-DHI+y5dBNvkpYUMiRQyxRBYBefZkJfo70VUkUAsRjcPs47muV9evftfZ0PJVCXYbAiCgght0DtcF9srFQmIgWA==
"@ethersproject/contracts@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.5.0.tgz#b735260d4bd61283a670a82d5275e2a38892c197"
integrity sha512-2viY7NzyvJkh+Ug17v7g3/IJC8HqZBDcOjYARZLdzRxrfGlRgmYgl6xPRKVbEzy1dWKw/iv7chDcS83pg6cLxg==
dependencies:
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/abi" "^5.5.0"
"@ethersproject/abstract-provider" "^5.5.0"
"@ethersproject/abstract-signer" "^5.5.0"
"@ethersproject/address" "^5.5.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/constants" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/transactions" "^5.5.0"
"@ethersproject/contracts@5.6.0":
version "5.6.0"
@@ -194,23 +210,21 @@
"@ethersproject/properties" "^5.6.0"
"@ethersproject/transactions" "^5.6.0"
"@ethersproject/contracts@5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.7.0.tgz#c305e775abd07e48aa590e1a877ed5c316f8bd1e"
integrity sha512-5GJbzEU3X+d33CdfPhcyS+z8MzsTrBGk/sc+G+59+tPa9yFkl6HQ9D6L0QMgNTA9q8dT0XKxxkyp883XsQvbbg==
"@ethersproject/hash@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.5.0.tgz#7cee76d08f88d1873574c849e0207dcb32380cc9"
integrity sha512-dnGVpK1WtBjmnp3mUT0PlU2MpapnwWI0PibldQEq1408tQBAbZpPidkWoVVuNMOl/lISO3+4hXZWCL3YV7qzfg==
dependencies:
"@ethersproject/abi" "^5.7.0"
"@ethersproject/abstract-provider" "^5.7.0"
"@ethersproject/abstract-signer" "^5.7.0"
"@ethersproject/address" "^5.7.0"
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/constants" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/transactions" "^5.7.0"
"@ethersproject/abstract-signer" "^5.5.0"
"@ethersproject/address" "^5.5.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/keccak256" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/hash@5.6.0", "@ethersproject/hash@^5.6.0":
"@ethersproject/hash@5.6.0", "@ethersproject/hash@^5.5.0", "@ethersproject/hash@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.6.0.tgz#d24446a5263e02492f9808baa99b6e2b4c3429a2"
integrity sha512-fFd+k9gtczqlr0/BruWLAu7UAOas1uRRJvOR84uDf4lNZ+bTkGl366qvniUZHKtlqxBRU65MkOobkmvmpHU+jA==
@@ -224,22 +238,25 @@
"@ethersproject/properties" "^5.6.0"
"@ethersproject/strings" "^5.6.0"
"@ethersproject/hash@5.7.0", "@ethersproject/hash@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.7.0.tgz#eb7aca84a588508369562e16e514b539ba5240a7"
integrity sha512-qX5WrQfnah1EFnO5zJv1v46a8HW0+E5xuBBDTwMFZLuVTx0tbU2kkx15NqdjxecrLGatQN9FGQKpb1FKdHCt+g==
"@ethersproject/hdnode@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.5.0.tgz#4a04e28f41c546f7c978528ea1575206a200ddf6"
integrity sha512-mcSOo9zeUg1L0CoJH7zmxwUG5ggQHU1UrRf8jyTYy6HxdZV+r0PBoL1bxr+JHIPXRzS6u/UW4mEn43y0tmyF8Q==
dependencies:
"@ethersproject/abstract-signer" "^5.7.0"
"@ethersproject/address" "^5.7.0"
"@ethersproject/base64" "^5.7.0"
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/keccak256" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/strings" "^5.7.0"
"@ethersproject/abstract-signer" "^5.5.0"
"@ethersproject/basex" "^5.5.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/pbkdf2" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/sha2" "^5.5.0"
"@ethersproject/signing-key" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/transactions" "^5.5.0"
"@ethersproject/wordlists" "^5.5.0"
"@ethersproject/hdnode@5.6.0", "@ethersproject/hdnode@^5.6.0":
"@ethersproject/hdnode@5.6.0", "@ethersproject/hdnode@^5.5.0", "@ethersproject/hdnode@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.6.0.tgz#9dcbe8d629bbbcf144f2cae476337fe92d320998"
integrity sha512-61g3Jp3nwDqJcL/p4nugSyLrpl/+ChXIOtCEM8UDmWeB3JCAt5FoLdOMXQc3WWkc0oM2C0aAn6GFqqMcS/mHTw==
@@ -257,25 +274,26 @@
"@ethersproject/transactions" "^5.6.0"
"@ethersproject/wordlists" "^5.6.0"
"@ethersproject/hdnode@5.7.0", "@ethersproject/hdnode@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.7.0.tgz#e627ddc6b466bc77aebf1a6b9e47405ca5aef9cf"
integrity sha512-OmyYo9EENBPPf4ERhR7oj6uAtUAhYGqOnIS+jE5pTXvdKBS99ikzq1E7Iv0ZQZ5V36Lqx1qZLeak0Ra16qpeOg==
"@ethersproject/json-wallets@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.5.0.tgz#dd522d4297e15bccc8e1427d247ec8376b60e325"
integrity sha512-9lA21XQnCdcS72xlBn1jfQdj2A1VUxZzOzi9UkNdnokNKke/9Ya2xA9aIK1SC3PQyBDLt4C+dfps7ULpkvKikQ==
dependencies:
"@ethersproject/abstract-signer" "^5.7.0"
"@ethersproject/basex" "^5.7.0"
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/pbkdf2" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/sha2" "^5.7.0"
"@ethersproject/signing-key" "^5.7.0"
"@ethersproject/strings" "^5.7.0"
"@ethersproject/transactions" "^5.7.0"
"@ethersproject/wordlists" "^5.7.0"
"@ethersproject/abstract-signer" "^5.5.0"
"@ethersproject/address" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/hdnode" "^5.5.0"
"@ethersproject/keccak256" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/pbkdf2" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/random" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/transactions" "^5.5.0"
aes-js "3.0.0"
scrypt-js "3.0.1"
"@ethersproject/json-wallets@5.6.0", "@ethersproject/json-wallets@^5.6.0":
"@ethersproject/json-wallets@5.6.0", "@ethersproject/json-wallets@^5.5.0", "@ethersproject/json-wallets@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.6.0.tgz#4c2fc27f17e36c583e7a252fb938bc46f98891e5"
integrity sha512-fmh86jViB9r0ibWXTQipxpAGMiuxoqUf78oqJDlCAJXgnJF024hOOX7qVgqsjtbeoxmcLwpPsXNU0WEe/16qPQ==
@@ -294,26 +312,15 @@
aes-js "3.0.0"
scrypt-js "3.0.1"
"@ethersproject/json-wallets@5.7.0", "@ethersproject/json-wallets@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.7.0.tgz#5e3355287b548c32b368d91014919ebebddd5360"
integrity sha512-8oee5Xgu6+RKgJTkvEMl2wDgSPSAQ9MB/3JYjFV9jlKvcYHUXZC+cQp0njgmxdHkYWn8s6/IqIZYm0YWCjO/0g==
"@ethersproject/keccak256@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.5.0.tgz#e4b1f9d7701da87c564ffe336f86dcee82983492"
integrity sha512-5VoFCTjo2rYbBe1l2f4mccaRFN/4VQEYFwwn04aJV2h7qf4ZvI2wFxUE1XOX+snbwCLRzIeikOqtAoPwMza9kg==
dependencies:
"@ethersproject/abstract-signer" "^5.7.0"
"@ethersproject/address" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/hdnode" "^5.7.0"
"@ethersproject/keccak256" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/pbkdf2" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/random" "^5.7.0"
"@ethersproject/strings" "^5.7.0"
"@ethersproject/transactions" "^5.7.0"
aes-js "3.0.0"
scrypt-js "3.0.1"
"@ethersproject/bytes" "^5.5.0"
js-sha3 "0.8.0"
"@ethersproject/keccak256@5.6.0", "@ethersproject/keccak256@^5.6.0":
"@ethersproject/keccak256@5.6.0", "@ethersproject/keccak256@^5.5.0", "@ethersproject/keccak256@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.6.0.tgz#fea4bb47dbf8f131c2e1774a1cecbfeb9d606459"
integrity sha512-tk56BJ96mdj/ksi7HWZVWGjCq0WVl/QvfhFQNeL8fxhBlGoP+L80uDCiQcpJPd+2XxkivS3lwRm3E0CXTfol0w==
@@ -321,39 +328,39 @@
"@ethersproject/bytes" "^5.6.0"
js-sha3 "0.8.0"
"@ethersproject/keccak256@5.7.0", "@ethersproject/keccak256@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.7.0.tgz#3186350c6e1cd6aba7940384ec7d6d9db01f335a"
integrity sha512-2UcPboeL/iW+pSg6vZ6ydF8tCnv3Iu/8tUmLLzWWGzxWKFFqOBQFLo6uLUv6BDrLgCDfN28RJ/wtByx+jZ4KBg==
dependencies:
"@ethersproject/bytes" "^5.7.0"
js-sha3 "0.8.0"
"@ethersproject/logger@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.5.0.tgz#0c2caebeff98e10aefa5aef27d7441c7fd18cf5d"
integrity sha512-rIY/6WPm7T8n3qS2vuHTUBPdXHl+rGxWxW5okDfo9J4Z0+gRRZT0msvUdIJkE4/HS29GUMziwGaaKO2bWONBrg==
"@ethersproject/logger@5.6.0", "@ethersproject/logger@^5.6.0":
"@ethersproject/logger@5.6.0", "@ethersproject/logger@^5.5.0", "@ethersproject/logger@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.6.0.tgz#d7db1bfcc22fd2e4ab574cba0bb6ad779a9a3e7a"
integrity sha512-BiBWllUROH9w+P21RzoxJKzqoqpkyM1pRnEKG69bulE9TSQD8SAIvTQqIMZmmCO8pUNkgLP1wndX1gKghSpBmg==
"@ethersproject/logger@5.7.0", "@ethersproject/logger@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.7.0.tgz#6ce9ae168e74fecf287be17062b590852c311892"
integrity sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig==
"@ethersproject/networks@5.5.2":
version "5.5.2"
resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.5.2.tgz#784c8b1283cd2a931114ab428dae1bd00c07630b"
integrity sha512-NEqPxbGBfy6O3x4ZTISb90SjEDkWYDUbEeIFhJly0F7sZjoQMnj5KYzMSkMkLKZ+1fGpx00EDpHQCy6PrDupkQ==
dependencies:
"@ethersproject/logger" "^5.5.0"
"@ethersproject/networks@5.6.2", "@ethersproject/networks@^5.6.0":
"@ethersproject/networks@5.6.2", "@ethersproject/networks@^5.5.0", "@ethersproject/networks@^5.6.0":
version "5.6.2"
resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.6.2.tgz#2bacda62102c0b1fcee408315f2bed4f6fbdf336"
integrity sha512-9uEzaJY7j5wpYGTojGp8U89mSsgQLc40PCMJLMCnFXTs7nhBveZ0t7dbqWUNrepWTszDbFkYD6WlL8DKx5huHA==
dependencies:
"@ethersproject/logger" "^5.6.0"
"@ethersproject/networks@5.7.1", "@ethersproject/networks@^5.7.0":
version "5.7.1"
resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.7.1.tgz#118e1a981d757d45ccea6bb58d9fd3d9db14ead6"
integrity sha512-n/MufjFYv3yFcUyfhnXotyDlNdFb7onmkSy8aQERi2PjNcnWQ66xXxa3XlS8nCcA8aJKJjIIMNJTC7tu80GwpQ==
"@ethersproject/pbkdf2@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.5.0.tgz#e25032cdf02f31505d47afbf9c3e000d95c4a050"
integrity sha512-SaDvQFvXPnz1QGpzr6/HToLifftSXGoXrbpZ6BvoZhmx4bNLHrxDe8MZisuecyOziP1aVEwzC2Hasj+86TgWVg==
dependencies:
"@ethersproject/logger" "^5.7.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/sha2" "^5.5.0"
"@ethersproject/pbkdf2@5.6.0", "@ethersproject/pbkdf2@^5.6.0":
"@ethersproject/pbkdf2@5.6.0", "@ethersproject/pbkdf2@^5.5.0", "@ethersproject/pbkdf2@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.6.0.tgz#04fcc2d7c6bff88393f5b4237d906a192426685a"
integrity sha512-Wu1AxTgJo3T3H6MIu/eejLFok9TYoSdgwRr5oGY1LTLfmGesDoSx05pemsbrPT2gG4cQME+baTSCp5sEo2erZQ==
@@ -361,27 +368,44 @@
"@ethersproject/bytes" "^5.6.0"
"@ethersproject/sha2" "^5.6.0"
"@ethersproject/pbkdf2@5.7.0", "@ethersproject/pbkdf2@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.7.0.tgz#d2267d0a1f6e123f3771007338c47cccd83d3102"
integrity sha512-oR/dBRZR6GTyaofd86DehG72hY6NpAjhabkhxgr3X2FpJtJuodEl2auADWBZfhDHgVCbu3/H/Ocq2uC6dpNjjw==
"@ethersproject/properties@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.5.0.tgz#61f00f2bb83376d2071baab02245f92070c59995"
integrity sha512-l3zRQg3JkD8EL3CPjNK5g7kMx4qSwiR60/uk5IVjd3oq1MZR5qUg40CNOoEJoX5wc3DyY5bt9EbMk86C7x0DNA==
dependencies:
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/sha2" "^5.7.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties@5.6.0", "@ethersproject/properties@^5.6.0":
"@ethersproject/properties@5.6.0", "@ethersproject/properties@^5.5.0", "@ethersproject/properties@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.6.0.tgz#38904651713bc6bdd5bdd1b0a4287ecda920fa04"
integrity sha512-szoOkHskajKePTJSZ46uHUWWkbv7TzP2ypdEK6jGMqJaEt2sb0jCgfBo0gH0m2HBpRixMuJ6TBRaQCF7a9DoCg==
dependencies:
"@ethersproject/logger" "^5.6.0"
"@ethersproject/properties@5.7.0", "@ethersproject/properties@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.7.0.tgz#a6e12cb0439b878aaf470f1902a176033067ed30"
integrity sha512-J87jy8suntrAkIZtecpxEPxY//szqr1mlBaYlQ0r4RCaiD2hjheqF9s1LVE8vVuJCXisjIP+JgtK/Do54ej4Sw==
"@ethersproject/providers@5.5.3":
version "5.5.3"
resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.5.3.tgz#56c2b070542ac44eb5de2ed3cf6784acd60a3130"
integrity sha512-ZHXxXXXWHuwCQKrgdpIkbzMNJMvs+9YWemanwp1fA7XZEv7QlilseysPvQe0D7Q7DlkJX/w/bGA1MdgK2TbGvA==
dependencies:
"@ethersproject/logger" "^5.7.0"
"@ethersproject/abstract-provider" "^5.5.0"
"@ethersproject/abstract-signer" "^5.5.0"
"@ethersproject/address" "^5.5.0"
"@ethersproject/basex" "^5.5.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/constants" "^5.5.0"
"@ethersproject/hash" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/networks" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/random" "^5.5.0"
"@ethersproject/rlp" "^5.5.0"
"@ethersproject/sha2" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/transactions" "^5.5.0"
"@ethersproject/web" "^5.5.0"
bech32 "1.1.4"
ws "7.4.6"
"@ethersproject/providers@5.6.4":
version "5.6.4"
@@ -408,33 +432,15 @@
bech32 "1.1.4"
ws "7.4.6"
"@ethersproject/providers@5.7.2":
version "5.7.2"
resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.7.2.tgz#f8b1a4f275d7ce58cf0a2eec222269a08beb18cb"
integrity sha512-g34EWZ1WWAVgr4aptGlVBF8mhl3VWjv+8hoAnzStu8Ah22VHBsuGzP17eb6xDVRzw895G4W7vvx60lFFur/1Rg==
"@ethersproject/random@5.5.1":
version "5.5.1"
resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.5.1.tgz#7cdf38ea93dc0b1ed1d8e480ccdaf3535c555415"
integrity sha512-YaU2dQ7DuhL5Au7KbcQLHxcRHfgyNgvFV4sQOo0HrtW3Zkrc9ctWNz8wXQ4uCSfSDsqX2vcjhroxU5RQRV0nqA==
dependencies:
"@ethersproject/abstract-provider" "^5.7.0"
"@ethersproject/abstract-signer" "^5.7.0"
"@ethersproject/address" "^5.7.0"
"@ethersproject/base64" "^5.7.0"
"@ethersproject/basex" "^5.7.0"
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/constants" "^5.7.0"
"@ethersproject/hash" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/networks" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/random" "^5.7.0"
"@ethersproject/rlp" "^5.7.0"
"@ethersproject/sha2" "^5.7.0"
"@ethersproject/strings" "^5.7.0"
"@ethersproject/transactions" "^5.7.0"
"@ethersproject/web" "^5.7.0"
bech32 "1.1.4"
ws "7.4.6"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/random@5.6.0", "@ethersproject/random@^5.6.0":
"@ethersproject/random@5.6.0", "@ethersproject/random@^5.5.0", "@ethersproject/random@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.6.0.tgz#1505d1ab6a250e0ee92f436850fa3314b2cb5ae6"
integrity sha512-si0PLcLjq+NG/XHSZz90asNf+YfKEqJGVdxoEkSukzbnBgC8rydbgbUgBbBGLeHN4kAJwUFEKsu3sCXT93YMsw==
@@ -442,15 +448,15 @@
"@ethersproject/bytes" "^5.6.0"
"@ethersproject/logger" "^5.6.0"
"@ethersproject/random@5.7.0", "@ethersproject/random@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.7.0.tgz#af19dcbc2484aae078bb03656ec05df66253280c"
integrity sha512-19WjScqRA8IIeWclFme75VMXSBvi4e6InrUNuaR4s5pTF2qNhcGdCUwdxUVGtDDqC00sDLCO93jPQoDUH4HVmQ==
"@ethersproject/rlp@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.5.0.tgz#530f4f608f9ca9d4f89c24ab95db58ab56ab99a0"
integrity sha512-hLv8XaQ8PTI9g2RHoQGf/WSxBfTB/NudRacbzdxmst5VHAqd1sMibWG7SENzT5Dj3yZ3kJYx+WiRYEcQTAkcYA==
dependencies:
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/rlp@5.6.0", "@ethersproject/rlp@^5.6.0":
"@ethersproject/rlp@5.6.0", "@ethersproject/rlp@^5.5.0", "@ethersproject/rlp@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.6.0.tgz#55a7be01c6f5e64d6e6e7edb6061aa120962a717"
integrity sha512-dz9WR1xpcTL+9DtOT/aDO+YyxSSdO8YIS0jyZwHHSlAmnxA6cKU3TrTd4Xc/bHayctxTgGLYNuVVoiXE4tTq1g==
@@ -458,15 +464,16 @@
"@ethersproject/bytes" "^5.6.0"
"@ethersproject/logger" "^5.6.0"
"@ethersproject/rlp@5.7.0", "@ethersproject/rlp@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.7.0.tgz#de39e4d5918b9d74d46de93af80b7685a9c21304"
integrity sha512-rBxzX2vK8mVF7b0Tol44t5Tb8gomOHkj5guL+HhzQ1yBh/ydjGnpw6at+X6Iw0Kp3OzzzkcKp8N9r0W4kYSs9w==
"@ethersproject/sha2@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.5.0.tgz#a40a054c61f98fd9eee99af2c3cc6ff57ec24db7"
integrity sha512-B5UBoglbCiHamRVPLA110J+2uqsifpZaTmid2/7W5rbtYVz6gus6/hSDieIU/6gaKIDcOj12WnOdiymEUHIAOA==
dependencies:
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
hash.js "1.1.7"
"@ethersproject/sha2@5.6.0", "@ethersproject/sha2@^5.6.0":
"@ethersproject/sha2@5.6.0", "@ethersproject/sha2@^5.5.0", "@ethersproject/sha2@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.6.0.tgz#364c4c11cc753bda36f31f001628706ebadb64d9"
integrity sha512-1tNWCPFLu1n3JM9t4/kytz35DkuF9MxqkGGEHNauEbaARdm2fafnOyw1s0tIQDPKF/7bkP1u3dbrmjpn5CelyA==
@@ -475,16 +482,19 @@
"@ethersproject/logger" "^5.6.0"
hash.js "1.1.7"
"@ethersproject/sha2@5.7.0", "@ethersproject/sha2@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.7.0.tgz#9a5f7a7824ef784f7f7680984e593a800480c9fb"
integrity sha512-gKlH42riwb3KYp0reLsFTokByAKoJdgFCwI+CCiX/k+Jm2mbNs6oOaCjYQSlI1+XBVejwH2KrmCbMAT/GnRDQw==
"@ethersproject/signing-key@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.5.0.tgz#2aa37169ce7e01e3e80f2c14325f624c29cedbe0"
integrity sha512-5VmseH7qjtNmDdZBswavhotYbWB0bOwKIlOTSlX14rKn5c11QmJwGt4GHeo7NrL/Ycl7uo9AHvEqs5xZgFBTng==
dependencies:
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
bn.js "^4.11.9"
elliptic "6.5.4"
hash.js "1.1.7"
"@ethersproject/signing-key@5.6.0", "@ethersproject/signing-key@^5.6.0":
"@ethersproject/signing-key@5.6.0", "@ethersproject/signing-key@^5.5.0", "@ethersproject/signing-key@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.6.0.tgz#4f02e3fb09e22b71e2e1d6dc4bcb5dafa69ce042"
integrity sha512-S+njkhowmLeUu/r7ir8n78OUKx63kBdMCPssePS89So1TH4hZqnWFsThEd/GiXYp9qMxVrydf7KdM9MTGPFukA==
@@ -496,17 +506,17 @@
elliptic "6.5.4"
hash.js "1.1.7"
"@ethersproject/signing-key@5.7.0", "@ethersproject/signing-key@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.7.0.tgz#06b2df39411b00bc57c7c09b01d1e41cf1b16ab3"
integrity sha512-MZdy2nL3wO0u7gkB4nA/pEf8lu1TlFswPNmy8AiYkfKTdO6eXBJyUdmHO/ehm/htHw9K/qF8ujnTyUAD+Ry54Q==
"@ethersproject/solidity@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.5.0.tgz#2662eb3e5da471b85a20531e420054278362f93f"
integrity sha512-9NgZs9LhGMj6aCtHXhtmFQ4AN4sth5HuFXVvAQtzmm0jpSCNOTGtrHZJAeYTh7MBjRR8brylWZxBZR9zDStXbw==
dependencies:
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
bn.js "^5.2.1"
elliptic "6.5.4"
hash.js "1.1.7"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/keccak256" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/sha2" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/solidity@5.6.0":
version "5.6.0"
@@ -520,19 +530,16 @@
"@ethersproject/sha2" "^5.6.0"
"@ethersproject/strings" "^5.6.0"
"@ethersproject/solidity@5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.7.0.tgz#5e9c911d8a2acce2a5ebb48a5e2e0af20b631cb8"
integrity sha512-HmabMd2Dt/raavyaGukF4XxizWKhKQ24DoLtdNbBmNKUOPqwjsKQSdV9GQtj9CBEea9DlzETlVER1gYeXXBGaA==
"@ethersproject/strings@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.5.0.tgz#e6784d00ec6c57710755699003bc747e98c5d549"
integrity sha512-9fy3TtF5LrX/wTrBaT8FGE6TDJyVjOvXynXJz5MT5azq+E6D92zuKNx7i29sWW2FjVOaWjAsiZ1ZWznuduTIIQ==
dependencies:
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/keccak256" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/sha2" "^5.7.0"
"@ethersproject/strings" "^5.7.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/constants" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/strings@5.6.0", "@ethersproject/strings@^5.6.0":
"@ethersproject/strings@5.6.0", "@ethersproject/strings@^5.5.0", "@ethersproject/strings@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.6.0.tgz#9891b26709153d996bf1303d39a7f4bc047878fd"
integrity sha512-uv10vTtLTZqrJuqBZR862ZQjTIa724wGPWQqZrofaPI/kUsf53TBG0I0D+hQ1qyNtllbNzaW+PDPHHUI6/65Mg==
@@ -541,16 +548,22 @@
"@ethersproject/constants" "^5.6.0"
"@ethersproject/logger" "^5.6.0"
"@ethersproject/strings@5.7.0", "@ethersproject/strings@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.7.0.tgz#54c9d2a7c57ae8f1205c88a9d3a56471e14d5ed2"
integrity sha512-/9nu+lj0YswRNSH0NXYqrh8775XNyEdUQAuf3f+SmOrnVewcJ5SBNAjF7lpgehKi4abvNNXyf+HX86czCdJ8Mg==
"@ethersproject/transactions@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.5.0.tgz#7e9bf72e97bcdf69db34fe0d59e2f4203c7a2908"
integrity sha512-9RZYSKX26KfzEd/1eqvv8pLauCKzDTub0Ko4LfIgaERvRuwyaNV78mJs7cpIgZaDl6RJui4o49lHwwCM0526zA==
dependencies:
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/constants" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/address" "^5.5.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/constants" "^5.5.0"
"@ethersproject/keccak256" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/rlp" "^5.5.0"
"@ethersproject/signing-key" "^5.5.0"
"@ethersproject/transactions@5.6.0", "@ethersproject/transactions@^5.6.0":
"@ethersproject/transactions@5.6.0", "@ethersproject/transactions@^5.5.0", "@ethersproject/transactions@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.6.0.tgz#4b594d73a868ef6e1529a2f8f94a785e6791ae4e"
integrity sha512-4HX+VOhNjXHZyGzER6E/LVI2i6lf9ejYeWD6l4g50AdmimyuStKc39kvKf1bXWQMg7QNVh+uC7dYwtaZ02IXeg==
@@ -565,10 +578,14 @@
"@ethersproject/rlp" "^5.6.0"
"@ethersproject/signing-key" "^5.6.0"
"@ethersproject/transactions@5.7.0", "@ethersproject/transactions@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.7.0.tgz#91318fc24063e057885a6af13fdb703e1f993d3b"
integrity sha512-kmcNicCp1lp8qanMTC3RIikGgoJ80ztTyvtsFvCYpSCfkjhD0jZ2LOrnbcuxuToLIUYYf+4XwD1rP+B/erDIhQ==
"@ethersproject/units@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.5.0.tgz#104d02db5b5dc42cc672cc4587bafb87a95ee45e"
integrity sha512-7+DpjiZk4v6wrikj+TCyWWa9dXLNU73tSTa7n0TSJDxkYbV3Yf1eRh9ToMLlZtuctNYu9RDNNy2USq3AdqSbag==
dependencies:
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/constants" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/units@5.6.0":
version "5.6.0"
@@ -579,14 +596,26 @@
"@ethersproject/constants" "^5.6.0"
"@ethersproject/logger" "^5.6.0"
"@ethersproject/units@5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.7.0.tgz#637b563d7e14f42deeee39245275d477aae1d8b1"
integrity sha512-pD3xLMy3SJu9kG5xDGI7+xhTEmGXlEqXU4OfNapmfnxLVY4EMSSRp7j1k7eezutBPH7RBN/7QPnwR7hzNlEFeg==
"@ethersproject/wallet@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.5.0.tgz#322a10527a440ece593980dca6182f17d54eae75"
integrity sha512-Mlu13hIctSYaZmUOo7r2PhNSd8eaMPVXe1wxrz4w4FCE4tDYBywDH+bAR1Xz2ADyXGwqYMwstzTrtUVIsKDO0Q==
dependencies:
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/constants" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/abstract-provider" "^5.5.0"
"@ethersproject/abstract-signer" "^5.5.0"
"@ethersproject/address" "^5.5.0"
"@ethersproject/bignumber" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/hash" "^5.5.0"
"@ethersproject/hdnode" "^5.5.0"
"@ethersproject/json-wallets" "^5.5.0"
"@ethersproject/keccak256" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/random" "^5.5.0"
"@ethersproject/signing-key" "^5.5.0"
"@ethersproject/transactions" "^5.5.0"
"@ethersproject/wordlists" "^5.5.0"
"@ethersproject/wallet@5.6.0":
version "5.6.0"
@@ -609,28 +638,18 @@
"@ethersproject/transactions" "^5.6.0"
"@ethersproject/wordlists" "^5.6.0"
"@ethersproject/wallet@5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.7.0.tgz#4e5d0790d96fe21d61d38fb40324e6c7ef350b2d"
integrity sha512-MhmXlJXEJFBFVKrDLB4ZdDzxcBxQ3rLyCkhNqVu3CDYvR97E+8r01UgrI+TI99Le+aYm/in/0vp86guJuM7FCA==
"@ethersproject/web@5.5.1":
version "5.5.1"
resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.5.1.tgz#cfcc4a074a6936c657878ac58917a61341681316"
integrity sha512-olvLvc1CB12sREc1ROPSHTdFCdvMh0J5GSJYiQg2D0hdD4QmJDy8QYDb1CvoqD/bF1c++aeKv2sR5uduuG9dQg==
dependencies:
"@ethersproject/abstract-provider" "^5.7.0"
"@ethersproject/abstract-signer" "^5.7.0"
"@ethersproject/address" "^5.7.0"
"@ethersproject/bignumber" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/hash" "^5.7.0"
"@ethersproject/hdnode" "^5.7.0"
"@ethersproject/json-wallets" "^5.7.0"
"@ethersproject/keccak256" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/random" "^5.7.0"
"@ethersproject/signing-key" "^5.7.0"
"@ethersproject/transactions" "^5.7.0"
"@ethersproject/wordlists" "^5.7.0"
"@ethersproject/base64" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/web@5.6.0", "@ethersproject/web@^5.6.0":
"@ethersproject/web@5.6.0", "@ethersproject/web@^5.5.0", "@ethersproject/web@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.6.0.tgz#4bf8b3cbc17055027e1a5dd3c357e37474eaaeb8"
integrity sha512-G/XHj0hV1FxI2teHRfCGvfBUHFmU+YOSbCxlAMqJklxSa7QMiHFQfAxvwY2PFqgvdkxEKwRNr/eCjfAPEm2Ctg==
@@ -641,18 +660,18 @@
"@ethersproject/properties" "^5.6.0"
"@ethersproject/strings" "^5.6.0"
"@ethersproject/web@5.7.1", "@ethersproject/web@^5.7.0":
version "5.7.1"
resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.7.1.tgz#de1f285b373149bee5928f4eb7bcb87ee5fbb4ae"
integrity sha512-Gueu8lSvyjBWL4cYsWsjh6MtMwM0+H4HvqFPZfB6dV8ctbP9zFAO73VG1cMWae0FLPCtz0peKPpZY8/ugJJX2w==
"@ethersproject/wordlists@5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.5.0.tgz#aac74963aa43e643638e5172353d931b347d584f"
integrity sha512-bL0UTReWDiaQJJYOC9sh/XcRu/9i2jMrzf8VLRmPKx58ckSlOJiohODkECCO50dtLZHcGU6MLXQ4OOrgBwP77Q==
dependencies:
"@ethersproject/base64" "^5.7.0"
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/strings" "^5.7.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/hash" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/wordlists@5.6.0", "@ethersproject/wordlists@^5.6.0":
"@ethersproject/wordlists@5.6.0", "@ethersproject/wordlists@^5.5.0", "@ethersproject/wordlists@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.6.0.tgz#79e62c5276e091d8575f6930ba01a29218ded032"
integrity sha512-q0bxNBfIX3fUuAo9OmjlEYxP40IB8ABgb7HjEZCL5IKubzV3j30CWi2rqQbjTS2HfoyQbfINoKcTVWP4ejwR7Q==
@@ -663,17 +682,6 @@
"@ethersproject/properties" "^5.6.0"
"@ethersproject/strings" "^5.6.0"
"@ethersproject/wordlists@5.7.0", "@ethersproject/wordlists@^5.7.0":
version "5.7.0"
resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.7.0.tgz#8fb2c07185d68c3e09eb3bfd6e779ba2774627f5"
integrity sha512-S2TFNJNfHWVHNE6cNDjbVlZ6MgE17MIxMbMg2zv3wn+3XSJGosL1m9ZVv3GXCf/2ymSsQ+hRI5IzoMJTG6aoVA==
dependencies:
"@ethersproject/bytes" "^5.7.0"
"@ethersproject/hash" "^5.7.0"
"@ethersproject/logger" "^5.7.0"
"@ethersproject/properties" "^5.7.0"
"@ethersproject/strings" "^5.7.0"
"@koa/cors@^3.3.0":
version "3.3.0"
resolved "https://registry.yarnpkg.com/@koa/cors/-/cors-3.3.0.tgz#b4c1c7ee303b7c968c8727f2a638a74675b50bb2"
@@ -877,25 +885,19 @@ bech32@1.1.4:
resolved "https://registry.yarnpkg.com/bech32/-/bech32-1.1.4.tgz#e38c9f37bf179b8eb16ae3a772b40c356d4832e9"
integrity sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ==
bls-wallet-clients@0.9.0-405e23a:
version "0.9.0-405e23a"
resolved "https://registry.npmjs.org/bls-wallet-clients/-/bls-wallet-clients-0.9.0-405e23a.tgz#b66121f9ec0cb4e821965606ada203e6601b773d"
integrity sha512-cMm6pq35VU30veCAHt6ArSavlqzXu+olQg+dzUH28fvqSeQsfWz2qiuBekGxSWOCfn8gX1j/8jHEhrGxXS509Q==
bls-wallet-clients@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/bls-wallet-clients/-/bls-wallet-clients-0.6.0.tgz#9d9b1add69420bbaf807c1442151e487f4ee87a5"
integrity sha512-6EivjMe2uRGIt6Aq5IampqlmsECavLqHGPm6Ki2l3+c+FnwfOQUzNelctVN/vRVxDbDpTX4iAfTIrYYpr1S/vw==
dependencies:
"@thehubbleproject/bls" "^0.5.1"
ethers "^5.7.2"
node-fetch "2.6.7"
ethers "5.5.4"
bn.js@^4.11.9:
version "4.12.0"
resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88"
integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==
bn.js@^5.2.1:
version "5.2.1"
resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70"
integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==
brorand@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f"
@@ -1034,6 +1036,42 @@ escape-html@^1.0.3:
resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=
ethers@5.5.4:
version "5.5.4"
resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.5.4.tgz#e1155b73376a2f5da448e4a33351b57a885f4352"
integrity sha512-N9IAXsF8iKhgHIC6pquzRgPBJEzc9auw3JoRkaKe+y4Wl/LFBtDDunNe7YmdomontECAcC5APaAgWZBiu1kirw==
dependencies:
"@ethersproject/abi" "5.5.0"
"@ethersproject/abstract-provider" "5.5.1"
"@ethersproject/abstract-signer" "5.5.0"
"@ethersproject/address" "5.5.0"
"@ethersproject/base64" "5.5.0"
"@ethersproject/basex" "5.5.0"
"@ethersproject/bignumber" "5.5.0"
"@ethersproject/bytes" "5.5.0"
"@ethersproject/constants" "5.5.0"
"@ethersproject/contracts" "5.5.0"
"@ethersproject/hash" "5.5.0"
"@ethersproject/hdnode" "5.5.0"
"@ethersproject/json-wallets" "5.5.0"
"@ethersproject/keccak256" "5.5.0"
"@ethersproject/logger" "5.5.0"
"@ethersproject/networks" "5.5.2"
"@ethersproject/pbkdf2" "5.5.0"
"@ethersproject/properties" "5.5.0"
"@ethersproject/providers" "5.5.3"
"@ethersproject/random" "5.5.1"
"@ethersproject/rlp" "5.5.0"
"@ethersproject/sha2" "5.5.0"
"@ethersproject/signing-key" "5.5.0"
"@ethersproject/solidity" "5.5.0"
"@ethersproject/strings" "5.5.0"
"@ethersproject/transactions" "5.5.0"
"@ethersproject/units" "5.5.0"
"@ethersproject/wallet" "5.5.0"
"@ethersproject/web" "5.5.1"
"@ethersproject/wordlists" "5.5.0"
ethers@^5.5.3:
version "5.6.4"
resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.6.4.tgz#23629e9a7d4bc5802dfb53d4da420d738744b53c"
@@ -1070,42 +1108,6 @@ ethers@^5.5.3:
"@ethersproject/web" "5.6.0"
"@ethersproject/wordlists" "5.6.0"
ethers@^5.7.2:
version "5.7.2"
resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.7.2.tgz#3a7deeabbb8c030d4126b24f84e525466145872e"
integrity sha512-wswUsmWo1aOK8rR7DIKiWSw9DbLWe6x98Jrn8wcTflTVvaXhAMaB5zGAXy0GYQEQp9iO1iSHWVyARQm11zUtyg==
dependencies:
"@ethersproject/abi" "5.7.0"
"@ethersproject/abstract-provider" "5.7.0"
"@ethersproject/abstract-signer" "5.7.0"
"@ethersproject/address" "5.7.0"
"@ethersproject/base64" "5.7.0"
"@ethersproject/basex" "5.7.0"
"@ethersproject/bignumber" "5.7.0"
"@ethersproject/bytes" "5.7.0"
"@ethersproject/constants" "5.7.0"
"@ethersproject/contracts" "5.7.0"
"@ethersproject/hash" "5.7.0"
"@ethersproject/hdnode" "5.7.0"
"@ethersproject/json-wallets" "5.7.0"
"@ethersproject/keccak256" "5.7.0"
"@ethersproject/logger" "5.7.0"
"@ethersproject/networks" "5.7.1"
"@ethersproject/pbkdf2" "5.7.0"
"@ethersproject/properties" "5.7.0"
"@ethersproject/providers" "5.7.2"
"@ethersproject/random" "5.7.0"
"@ethersproject/rlp" "5.7.0"
"@ethersproject/sha2" "5.7.0"
"@ethersproject/signing-key" "5.7.0"
"@ethersproject/solidity" "5.7.0"
"@ethersproject/strings" "5.7.0"
"@ethersproject/transactions" "5.7.0"
"@ethersproject/units" "5.7.0"
"@ethersproject/wallet" "5.7.0"
"@ethersproject/web" "5.7.1"
"@ethersproject/wordlists" "5.7.0"
form-data@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f"
@@ -1350,7 +1352,7 @@ negotiator@0.6.3:
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd"
integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==
node-fetch@2, node-fetch@2.6.7:
node-fetch@2:
version "2.6.7"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==

View File

@@ -1,46 +1,32 @@
RPC_URL=https://goerli-rollup.arbitrum.io/rpc
RPC_POLLING_INTERVAL=4000
RPC_URL=http://localhost:8545
USE_TEST_NET=false
ORIGIN=http://localhost:3000
PORT=3000
NETWORK_CONFIG_PATH=../contracts/networks/arbitrum-goerli.json
PRIVATE_KEY_AGG=0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a
PRIVATE_KEY_ADMIN=0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d
NETWORK_CONFIG_PATH=../contracts/networks/local.json
PRIVATE_KEY_AGG=0x0000000000000000000000000000000000000000000000000000000000000a99
PRIVATE_KEY_ADMIN=
TEST_BLS_WALLETS_SECRET=test-bls-wallets-secret
DB_PATH=aggregator.sqlite
PG_HOST=localhost
PG_PORT=5432
PG_USER=bls
PG_PASSWORD=generate-a-strong-password
PG_DB_NAME=bls_aggregator
BUNDLE_TABLE_NAME=bundles
BUNDLE_QUERY_LIMIT=100
MAX_ELIGIBILITY_DELAY=300
MAX_GAS_PER_BUNDLE=2000000
MAX_AGGREGATION_SIZE=12
MAX_AGGREGATION_DELAY_MILLIS=5000
MAX_UNCONFIRMED_AGGREGATIONS=3
LOG_QUERIES=false
TEST_LOGGING=false
REQUIRE_FEES=true
BREAKEVEN_OPERATION_COUNT=4.5
ALLOW_LOSSES=true
FEE_TYPE=ether
# Set this to false in production to avoid an unexpected transaction on startup.
# Use ./programs/createInternalBlsWallet.ts beforehand instead.
AUTO_CREATE_INTERNAL_BLS_WALLET=true
# Arbitrum doesn't seem to use/need priority fees
PRIORITY_FEE_PER_GAS=0
# Arbitrum doesn't change its base fee much, in fact it's usually locked at
# 0.1gwei. They use changes in gasLimit to account for L1 base fee changes.
PREVIOUS_BASE_FEE_PERCENT_INCREASE=2
BUNDLE_CHECKING_CONCURRENCY=8
IS_OPTIMISM=false
OPTIMISM_GAS_PRICE_ORACLE_ADDRESS=0x420000000000000000000000000000000000000F
OPTIMISM_L1_BASE_FEE_PERCENT_INCREASE=2
FEE_PER_GAS=0
FEE_PER_BYTE=0

View File

@@ -1,42 +0,0 @@
RPC_URL=http://localhost:8545
RPC_POLLING_INTERVAL=500
USE_TEST_NET=false
ORIGIN=http://localhost:3000
PORT=3000
NETWORK_CONFIG_PATH=../contracts/networks/local.json
PRIVATE_KEY_AGG=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80
PRIVATE_KEY_ADMIN=0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d
TEST_BLS_WALLETS_SECRET=test-bls-wallets-secret
DB_PATH=aggregator.sqlite
BUNDLE_QUERY_LIMIT=100
MAX_ELIGIBILITY_DELAY=300
MAX_GAS_PER_BUNDLE=2000000
MAX_AGGREGATION_DELAY_MILLIS=5000
MAX_UNCONFIRMED_AGGREGATIONS=3
LOG_QUERIES=false
TEST_LOGGING=false
REQUIRE_FEES=true
BREAKEVEN_OPERATION_COUNT=2.5
ALLOW_LOSSES=true
FEE_TYPE=ether
# Set this to false in production to avoid an unexpected transaction on startup.
# Use ./programs/createInternalBlsWallet.ts beforehand instead.
AUTO_CREATE_INTERNAL_BLS_WALLET=true
# 0.5 gwei
PRIORITY_FEE_PER_GAS=500000000
PREVIOUS_BASE_FEE_PERCENT_INCREASE=13
BUNDLE_CHECKING_CONCURRENCY=8
IS_OPTIMISM=false

View File

@@ -1,39 +0,0 @@
RPC_URL=http://localhost:8545
RPC_POLLING_INTERVAL=500
USE_TEST_NET=false
ORIGIN=http://localhost:3000
PORT=3000
NETWORK_CONFIG_PATH=../contracts/networks/local.json
PRIVATE_KEY_AGG=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80
PRIVATE_KEY_ADMIN=0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d
TEST_BLS_WALLETS_SECRET=test-bls-wallets-secret
DB_PATH=aggregator.sqlite
BUNDLE_QUERY_LIMIT=100
MAX_ELIGIBILITY_DELAY=300
MAX_GAS_PER_BUNDLE=2000000
MAX_AGGREGATION_DELAY_MILLIS=5000
MAX_UNCONFIRMED_AGGREGATIONS=3
LOG_QUERIES=true
TEST_LOGGING=true
REQUIRE_FEES=true
BREAKEVEN_OPERATION_COUNT=2.5
ALLOW_LOSSES=true
FEE_TYPE=ether
AUTO_CREATE_INTERNAL_BLS_WALLET=true
PRIORITY_FEE_PER_GAS=500000000
PREVIOUS_BASE_FEE_PERCENT_INCREASE=13
BUNDLE_CHECKING_CONCURRENCY=8
IS_OPTIMISM=false

View File

@@ -1,6 +1,4 @@
.env*
!.env*.example
!.env*.test
!.env.example
cov_profile*
/build
/aggregator.sqlite

View File

@@ -13,6 +13,7 @@
"runtimeExecutable": "deno",
"runtimeArgs": [
"run",
"--unstable",
"--inspect",
"--allow-all"
],

View File

@@ -1,15 +1,14 @@
FROM denoland/deno:1.30.1
FROM denoland/deno:1.20.6
ADD build /app
WORKDIR /app
RUN deno cache ts/programs/aggregator.ts
ENV IS_DOCKER="true"
RUN deno cache --unstable ts/programs/aggregator.ts
CMD [ \
"deno", \
"run", \
"--unstable", \
"-A", \
"ts/programs/aggregator.ts" \
]

View File

@@ -6,66 +6,9 @@ Accepts transaction bundles (including bundles that contain a single
transaction) and submits aggregations of these bundles to the configured
Verification Gateway.
## Docker Usage
Docker images of the aggregator are
[available on DockerHub](https://hub.docker.com/r/blswallet/aggregator).
If you're targeting a network that
[already has a deployment of the BLSWallet contracts](../contracts/networks),
you can use these images standalone (without this repository) as follows:
```sh
mkdir aggregator
cd aggregator
curl https://raw.githubusercontent.com/web3well/bls-wallet/main/aggregator/.env.example >.env
# Replace CHOSEN_NETWORK below
curl https://raw.githubusercontent.com/web3well/bls-wallet/main/contracts/networks/CHOSEN_NETWORK.json >networkConfig.json
```
In `.env`:
- Change `RPC_URL`
- (If using `localhost`, you probably want `host.docker.internal`)
- Change `PRIVATE_KEY_AGG`
- Ignore `NETWORK_CONFIG_PATH` (it's not used inside docker)
- See [Configuration](#configuration) for more detail and other options
If you're running in production, you might want to set
`AUTO_CREATE_INTERNAL_BLS_WALLET` to `false`. The internal BLS wallet is needed
for user fee estimation. Creating it is a one-time setup that will use
`PRIVATE_KEY_AGG` to pay for gas. You can create it explicitly like this:
```sh
docker run \
--rm \
-it \
--mount type=bind,source="$PWD/.env",target=/app/.env \
--mount type=bind,source="$PWD/networkConfig.json",target=/app/networkConfig.json \
blswallet/aggregator \
./ts/programs/createInternalBlsWallet.ts
```
Finally, start the aggregator:
```sh
docker run \
--name choose-container-name \ # Optional
-d \ # Optional
-p3000:3000 \ # If you chose a different PORT in .env, change it here too
--restart=unless-stopped \ # Optional
--mount type=bind,source="$PWD/.env",target=/app/.env \
--mount type=bind,source="$PWD/networkConfig.json",target=/app/networkConfig.json \
blswallet/aggregator # Tags of the form :git-$VERSION are also available
```
(You may need to remove the comments before pasting into your terminal.)
## Installation
Install [Deno](deno.land)
Install [Deno](deno.land).
### Configuration
@@ -80,44 +23,74 @@ you might have:
```
.env.local
.env.arbitrum-goerli
.env.optimism-goerli
.env.optimistic-kovan
```
If you don't have a `.env`, you will need to append `--env <name>` to all
commands.
#### Environment Variables
### PostgreSQL
| Name | Example Value | Description |
| ------------------------------------- | ------------------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| RPC_URL | http://localhost:8545 | The RPC endpoint for an EVM node that the BLS Wallet contracts are deployed on |
| RPC_POLLING_INTERVAL | 4000 | How long to wait between retries, when needed (used by ethers when waiting for blocks) |
| USE_TEST_NET | false | Whether to set all transactions' `gasPrice` to 0. Workaround for some networks |
| ORIGIN | http://localhost:3000 | The origin for the aggregator client. Used only in manual tests |
| PORT | 3000 | The port to bind the aggregator to |
| NETWORK_CONFIG_PATH | ../contracts/networks/local.json | Path to the network config file, which contains information on deployed BLS Wallet contracts |
| PRIVATE_KEY_AGG | 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 | Private key for the EOA account used to submit bundles on chain. Transactions are paid by the account linked to PRIVATE_KEY_AGG. By default, bundles must pay for themselves by sending funds to tx.origin or the aggregator's on-chain address |
| PRIVATE_KEY_ADMIN | 0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d | Private key for the admin EOA account. Used only in tests |
| TEST_BLS_WALLETS_SECRET | test-bls-wallets-secret | Secret used to seed BLS Wallet private keys during tests |
| DB_PATH | aggregator.sqlite | File path of the sqlite db |
| BUNDLE_QUERY_LIMIT | 100 | Maximum number of bundles returned from sqlite |
| MAX_GAS_PER_BUNDLE | 2000000 | Limits the number of user operations that can be bundled together by using this value as the approximate limit on the amount of gas in an aggregate bundle |
| MAX_AGGREGATION_DELAY_MILLIS | 5000 | Maximum amount of time in milliseconds aggregator will wait before submitting bundles on chain. A higher number will allow more time for bundles to fill, but may result in longer periods before submission. A lower number allows more frequent L2 submissions, but may result in smaller bundles |
| MAX_UNCONFIRMED_AGGREGATIONS | 3 | Maximum unconfirmed bundle aggregations that will be submitted on chain |
| LOG_QUERIES | false | Whether to print sqlite queries in event log. When running tests, `TEST_LOGGING` must also be enabled |
| TEST_LOGGING | false | Whether to print aggregator server events to stdout during tests. Useful for debugging & logging |
| REQUIRE_FEES | true | Whether to require that user bundles pay the aggregator a sufficient fee |
| BREAKEVEN_OPERATION_COUNT | 4.5 | The aggregator must pay an overhead to submit a bundle regardless of how many operations it contains. This parameter determines how much each operation must contribute to this overhead |
| ALLOW_LOSSES | true | Even if each user bundle pays the required fee, the aggregate bundle may not be profitable if it is too small. Setting this to true makes the aggregator submit these bundles anyway |
| FEE_TYPE | ether OR token:0xabcd...1234 | The fee type the aggregator will accept. Either `ether` for ETH/the chain's native currency or `token:0xabcd...1234` (token contract address) for an ERC20 token |
| AUTO_CREATE_INTERNAL_BLS_WALLET | false | An internal BLS wallet is used to calculate bundle overheads. Setting this to true allows creating this wallet on startup, but might be undesirable in production (see `programs/createInternalBlsWallet.ts` for manual creation) |
| PRIORITY_FEE_PER_GAS | 0 | The priority fee used when submitting bundles (and passed on as a requirement for user bundles) |
| PREVIOUS_BASE_FEE_PERCENT_INCREASE | 2 | Used to determine the max basefee attached to aggregator transactions (and passed on as a requirement for user bundles) |
| BUNDLE_CHECKING_CONCURRENCY | 8 | The maximum number of bundles that are checked concurrently (getting gas usage, detecting fees, etc) |
| IS_OPTIMISM | false | Optimism's strategy for charging for L1 fees requires special logic in the aggregator. In addition to gasEstimate * gasPrice, we need to replicate Optimism's calculation and pass it on to the user |
| OPTIMISM_GAS_PRICE_ORACLE_ADDRESS | 0x420000000000000000000000000000000000000F | Address for the Optimism gas price oracle contract. Required when IS_OPTIMISM is true |
| OPTIMISM_L1_BASE_FEE_PERCENT_INCREASE | 2 | Similar to PREVIOUS_BASE_FEE_PERCENT_INCREASE, but for the L1 basefee for the optimism-specific calculation. This gets passed on to users. Required when IS_OPTIMISM is true |
#### With docker-compose
```sh
cd .. # root of repo
docker-compose up -d postgres
```
#### Local Install
Install, e.g.:
```sh
sudo apt update
sudo apt install postgresql postgresql-contrib
```
Create a user called `bls`:
```
$ sudo -u postgres createuser --interactive
Enter name of role to add: bls
Shall the new role be a superuser? (y/n) n
Shall the new role be allowed to create databases? (y/n) n
Shall the new role be allowed to create more new roles? (y/n) n
```
Set the user's password:
```
$ sudo -u postgres psql
psql (12.6 (Ubuntu 12.6-0ubuntu0.20.04.1))
Type "help" for help.
postgres=# ALTER USER bls WITH PASSWORD 'generate-a-strong-password';
```
Create a database called `bls_aggregator`:
```sh
sudo -u postgres createdb bls_aggregator
```
On Ubuntu (and probably elsewhere), postgres is configured to offer SSL
connections but with an invalid certificate. However, the Deno driver for
postgres doesn't support connecting over SSL with an invalid certificate.
There are two options here:
1. Set up SSL with a valid certificate
([guide](https://www.postgresql.org/docs/current/ssl-tcp.html)).
2. Turn off SSL in postgres (only for development or if you can ensure the
connection isn't vulnerable to attack).
1. View the config location with
`sudo -u postgres psql -c 'SHOW config_file'`.
2. Turn off ssl in that config.
```diff
-ssl = on
+ssl = off
```
3. Restart postgres `sudo systemctl restart postgresql`.
## Running
@@ -129,20 +102,6 @@ Can be run locally or hosted.
# ./programs/aggregator.ts --env <name>
```
**Note**: It's also possible to run the aggregator directly from github:
```sh
deno run \
--allow-net \
--allow-env \
--allow-read=. \
--allow-write=. \
https://raw.githubusercontent.com/web3well/bls-wallet/main/aggregator/programs/aggregator.ts
```
(This can be done without a clone of the repository, but you'll still need to
set up `.env` and your network config.)
## Testing
- launch optimism
@@ -151,85 +110,6 @@ set up `.env` and your network config.)
NB each test must use unique address(es). (+ init code)
## Fees
### User Guide
User bundles must pay fees to compensate the aggregator, except in testing
situations where the aggregator may be configured to accept bundles that don't
pay fees (see `REQUIRE_FEES`). The aggregator simply detects that fees have
been paid by observing the effect of a user bundle on its balance. This allows
bundles to pay the aggregator using any mechanism of their choosing, and is why
bundles do not have fields for paying fees explicitly.
The simplest way to do this is to include an extra action to pay `tx.origin`.
Use the `POST /estimateFee` API to determine the fee required for a bundle. The
body of this request is the bundle. Response:
```json
{
"feeType": "(See FEE_TYPE enviroment variable)",
"feeDetected": "(The fee that has been detected for the provided bundle)",
"feeRequired": "(Required fee)",
"successes": [
/* Array of bools indicating success of each action */
]
}
```
Note that if you want to pay the aggregator using an additional action, you
should include that action with a payment of zero when estimating; otherwise
the extra action itself will increase the fee that needs to be paid. You can
also use the [aggregator-proxy](../aggregator-proxy/) package in place of an
aggregator. This is useful for running more advanced logic, such as inspecting
bundles and potentially paying for them, before the proxy forwards the bundles
to an underlying aggregator.
Also, `feeRequired` is the absolute minimum necessary fee to process the bundle
at the time of estimation, so paying extra is advisable to increase the chance
that the fee is sufficient during submission.
In the case of a malicious aggregator, or if the chosen aggregator service goes
down, an end user can always execute actions themselves, by submitting a bundle
on chain via `VerificationGateway.processBundle`.
### Technical Detail
The fees required by the aggregator are designed to prevent it from losing
money. There are two main ways that losses can still happen:
1. Bundles that don't simulate accurately
2. Loss-making bundles being allowed by config (`ALLOW_LOSSES`)
When calculating the required fee, the aggregator needs to account for two
things:
1. The marginal cost of including the user bundle
2. A contribution to the overhead of submitting the aggregate bundle
Remember that the whole point of aggregation is to save on fees using a single
aggregate signature. This means that measuring the fee required to process the
user bundle in isolation won't reflect that saving.
Instead, we measure the overhead using hypothetical operations that contain zero
actions. We make a bundle with one of these, and another with two of these, and
extrapolate backwards to a bundle containing zero operations (see
`measureBundleOverheadGas`).
We can then subtract that overhead from the user's bundle to obtain its marginal
cost.
The user's share of the overhead is then added by multiplying it by
`operationCount / BREAKEVEN_OPERATION_COUNT`. User bundles usually have an
`operationCount` of 1, so if `BREAKEVEN_OPERATION_COUNT` is 4.5, then the bundle
will be required to pay 22% of the overhead.
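A minimal sketch of that arithmetic (illustrative only, not the aggregator's
exact implementation; the numbers in the closing comment are made up):

```ts
// Illustrative sketch of the fee requirement described above.
// marginalGas: extra gas from including this user bundle in an aggregate bundle
// overheadGas: gas of an aggregate bundle with zero operations, extrapolated
//              from 1-op and 2-op bundles (see measureBundleOverheadGas)
import { BigNumber } from "ethers";

function requiredFee(
  marginalGas: BigNumber,
  overheadGas: BigNumber,
  operationCount: number,
  breakevenOperationCount: number, // BREAKEVEN_OPERATION_COUNT, e.g. 4.5
  gasPrice: BigNumber,
): BigNumber {
  // Each user bundle contributes operationCount / BREAKEVEN_OPERATION_COUNT
  // of the overhead: with 1 operation and a breakeven of 4.5 that is ~22%.
  const overheadShare = overheadGas
    .mul(Math.round(operationCount * 1000))
    .div(Math.round(breakevenOperationCount * 1000));

  return marginalGas.add(overheadShare).mul(gasPrice);
}

// Example: 60k marginal gas, 180k overhead gas, 1 operation, breakeven 4.5
// => requiredFee = (60_000 + 40_000) * gasPrice
```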
From the aggregator's perspective, aggregate bundles with fewer operations than
`BREAKEVEN_OPERATION_COUNT` should make a loss, and larger bundles should make a
profit. If `ALLOW_LOSSES` is `false`, bundles which are predicted to make a loss
will not be submitted.
## Development
### Environment
@@ -253,7 +133,7 @@ Tests are defined in `test`. Running them directly is a bit verbose because of
the deno flags you need:
```sh
deno test --allow-net --allow-env --allow-read
deno test --allow-net --allow-env --allow-read --unstable
```
Instead, `./programs/premerge.ts` may be more useful for you. It'll make sure
@@ -285,22 +165,9 @@ TS2300 [ERROR]: Duplicate identifier 'TypedArray'.
You need to reload modules (`-r`):
```sh
deno run -r --allow-net --allow-env --allow-read ./programs/aggregator.ts
deno run -r --allow-net --allow-env --allow-read --unstable ./programs/aggregator.ts
```
#### Transaction reverted: function call to a non-contract account
- Is `./contracts/contracts/lib/hubble-contracts/contracts/libs/BLS.sol`'s
`COST_ESTIMATOR_ADDRESS` set to the right precompile cost estimator's contract
address?
- Are the BLS Wallet contracts deployed on the correct network?
- Is `NETWORK_CONFIG_PATH` in `.env` set to the right config?
#### Deno version
Make sure your Deno version is
[up to date.](https://deno.land/manual/getting_started/installation#updating)
### Notable Components
- **src/chain**: Should contain all of the contract interactions, exposing more
@@ -317,9 +184,10 @@ Make sure your Deno version is
- **`BundleService`**: Keeps track of all stored transactions, as well as
accepting (or rejecting) them and submitting aggregated bundles to
`EthereumService`.
- **`BundleTable`**: Abstraction layer over sqlite bundle tables, exposing typed
functions instead of queries. Handles conversions to and from the field types
supported by sqlite so that other code can have a uniform js-friendly interface
- **`BundleTable`**: Abstraction layer over postgres bundle tables, exposing
typed functions instead of queries. Handles conversions to and from the field
types supported by postgres so that other code can have a uniform js-friendly
interface
([`TransactionData`](https://github.com/jzaki/bls-wallet-signer/blob/673e2ae/src/types.ts#L12)).
- **`Client`**: Provides an abstraction over the external HTTP interface so that
programs talking to the aggregator can do so via regular js functions with
@@ -334,9 +202,16 @@ Make sure your Deno version is
## Hosting Guide
1. Configure your server to allow TCP on ports 80 and 443
2. Install docker and nginx:
2. Follow the [Installation](#Installation) instructions
3. Install docker and nginx:
`sudo apt update && sudo apt install docker.io nginx`
3. Configure log rotation in docker by setting `/etc/docker/daemon.json` to
4. Run `./programs/build.ts`
- If you're using a named environment, add `--env <name>`
- If `docker` requires `sudo`, add `--sudo-docker`
5. Configure log rotation in docker by setting `/etc/docker/daemon.json` to
```json
{
@@ -350,9 +225,19 @@ Make sure your Deno version is
and restart docker `sudo systemctl restart docker`
4. Follow the [Docker Usage](#docker-usage) instructions (just use port 3000,
external requests are handled by nginx)
5. Create `/etc/nginx/sites-available/aggregator`
6. Load the docker image: `sudo docker load <docker-image.tar.gz`
7. Run the aggregator:
```sh
sudo docker run \
--name aggregator \
-d \
--net=host \
--restart=unless-stopped \
aggregator:latest
```
8. Create `/etc/nginx/sites-available/aggregator`
```nginx
server {
@@ -375,7 +260,7 @@ This allows you to add some static content at `/home/aggregator/static-content`.
Adding static content is optional; requests that don't match static content will
be passed to the aggregator.
6. Create a symlink in sites-enabled
9. Create a symlink in sites-enabled
```sh
ln -s /etc/nginx/sites-available/aggregator /etc/nginx/sites-enabled/aggregator
@@ -383,5 +268,5 @@ ln -s /etc/nginx/sites-available/aggregator /etc/nginx/sites-enabled/aggregator
Reload nginx for config to take effect: `sudo nginx -s reload`
7. Set up https for your domain by following the instructions at
https://certbot.eff.org/lets-encrypt/ubuntufocal-nginx.
10. Set up https for your domain by following the instructions at
https://certbot.eff.org/lets-encrypt/ubuntufocal-nginx.

View File

@@ -27,21 +27,17 @@ export {
Contract,
ethers,
Wallet,
} from "https://esm.sh/ethers@5.7.2";
} from "https://esm.sh/ethers@5.5.4";
import { ethers } from "https://esm.sh/ethers@5.7.2";
import { ethers } from "https://esm.sh/ethers@5.5.4";
export type {
BaseContract,
BigNumberish,
BytesLike,
} from "https://esm.sh/ethers@5.7.2";
} from "https://esm.sh/ethers@5.5.4";
export const keccak256 = ethers.utils.keccak256;
// Adding more accurate type information here (ethers uses Array<any>)
export const shuffled: <T>(array: T[]) => T[] = ethers.utils.shuffled;
export type {
ActionData,
AggregatorUtilities,
BlsWalletSigner,
Bundle,
@@ -50,34 +46,39 @@ export type {
MockERC20,
NetworkConfig,
Operation,
OperationResultError,
PublicKey,
Signature,
VerificationGateway,
} from "https://esm.sh/bls-wallet-clients@0.9.0-405e23a";
} from "https://esm.sh/bls-wallet-clients@0.6.0";
export {
Aggregator as AggregatorClient,
AggregatorUtilitiesFactory,
BlsRegistrationCompressor,
AggregatorUtilities__factory,
BlsWalletWrapper,
BundleCompressor,
ContractsConnector,
decodeError,
Erc20Compressor,
ERC20Factory,
FallbackCompressor,
ERC20__factory,
getConfig,
MockERC20Factory,
VerificationGatewayFactory,
} from "https://esm.sh/bls-wallet-clients@0.9.0-405e23a";
MockERC20__factory,
VerificationGateway__factory,
} from "https://esm.sh/bls-wallet-clients@0.6.0";
// Workaround for esbuild's export-star bug
import blsWalletClients from "https://esm.sh/bls-wallet-clients@0.9.0-405e23a";
const { bundleFromDto, bundleToDto, initBlsWalletSigner } = blsWalletClients;
import blsWalletClients from "https://esm.sh/bls-wallet-clients@0.6.0";
const {
bundleFromDto,
bundleToDto,
initBlsWalletSigner,
} = blsWalletClients;
export { bundleFromDto, bundleToDto, initBlsWalletSigner };
export * as sqlite from "https://deno.land/x/sqlite@v3.7.0/mod.ts";
export { Semaphore } from "https://deno.land/x/semaphore@v1.1.2/mod.ts";
// Database dependencies
export {
Constraint,
CreateTableMode,
DataType,
OrderByType,
QueryClient,
QueryTable,
unsketchify,
} from "https://deno.land/x/postquery@v0.1.1/mod.ts";
export { pick } from "npm:@s-libs/micro-dash@15.2.0";
export type { TableOptions } from "https://deno.land/x/postquery@v0.1.1/mod.ts";

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write --unstable
import { AggregatorClient } from "../deps.ts";
import * as env from "../src/env.ts";
@@ -9,7 +9,7 @@ const client = new AggregatorClient(env.ORIGIN);
const fx = await Fixture.create(import.meta.url);
const [wallet] = await fx.setupWallets(1);
const bundle = await wallet.signWithGasEstimate({
const bundle = wallet.sign({
nonce: await wallet.Nonce(),
actions: [{
ethValue: 0,

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write --unstable
import { AggregatorClient, BigNumber, Bundle } from "../deps.ts";
import * as env from "../src/env.ts";
@@ -17,7 +17,6 @@ const bundle: Bundle = {
senderPublicKeys: [[dummyHex(32), dummyHex(32), dummyHex(32), dummyHex(32)]],
operations: [{
nonce: BigNumber.from(0),
gas: BigNumber.from(0),
actions: [{
ethValue: BigNumber.from(0),
contractAddress: dummyHex(20),

View File

@@ -1,17 +0,0 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read
import * as env from "../src/env.ts";
import { ethers } from "../deps.ts";
import OptimismGasPriceOracle from "../src/app/OptimismGasPriceOracle.ts";
const oracle = new OptimismGasPriceOracle(
new ethers.providers.JsonRpcProvider(env.RPC_URL),
);
const { l1BaseFee, overhead, scalar, decimals } = await oracle.getAllParams();
console.log({
l1BaseFee: `${(l1BaseFee.toNumber() / 1e9).toFixed(3)} gwei`,
overhead: `${overhead.toNumber()} L1 gas`,
scalar: scalar.toNumber() / (10 ** decimals.toNumber()),
});

View File

@@ -1,15 +1,16 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write --unstable
import { ethers } from "../deps.ts";
import * as env from "../src/env.ts";
import TestBlsWallet from "./helpers/TestBlsWallet.ts";
import TestBlsWallets from "./helpers/TestBlsWallets.ts";
const wallet = await TestBlsWallet(
const [wallet] = await TestBlsWallets(
new ethers.providers.JsonRpcProvider(env.RPC_URL),
1,
);
console.log({
privateKey: wallet.blsWalletSigner.privateKey,
privateKey: wallet.privateKey,
address: wallet.walletContract.address,
});

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write --unstable
import {
AggregatorClient,

View File

@@ -1,33 +0,0 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
import { AggregatorClient, ethers } from "../deps.ts";
import AdminWallet from "../src/chain/AdminWallet.ts";
import * as env from "../test/env.ts";
import TestBlsWallet from "./helpers/TestBlsWallet.ts";
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
const client = new AggregatorClient(env.ORIGIN);
const wallet = await TestBlsWallet(provider);
const adminWallet = AdminWallet(provider);
await (await adminWallet.sendTransaction({
to: wallet.address,
value: 1,
})).wait();
const bundle = await wallet.signWithGasEstimate({
nonce: await wallet.Nonce(),
actions: [{
ethValue: 1,
contractAddress: adminWallet.address,
encodedFunction: "0x",
}],
});
const feeEstimation = await client.estimateFee(bundle);
console.log({ feeEstimation });

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write --unstable
import { ethers } from "../deps.ts";

View File

@@ -1,17 +0,0 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read
import { ethers } from "../deps.ts";
import * as env from "../src/env.ts";
import getOptimismL1Fee from "../src/helpers/getOptimismL1Fee.ts";
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
const txHash = Deno.args[0];
if (!txHash.startsWith("0x")) {
throw new Error("First arg should be tx hash");
}
const l1Fee = await getOptimismL1Fee(provider, txHash);
console.log(`${ethers.utils.formatEther(l1Fee)} ETH`);

View File

@@ -1,15 +0,0 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read
import { ethers } from "../deps.ts";
import * as env from "../src/env.ts";
import getRawTransaction from "../src/helpers/getRawTransaction.ts";
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
const txHash = Deno.args[0];
if (!txHash.startsWith("0x")) {
throw new Error("First arg should be tx hash");
}
console.log(await getRawTransaction(provider, txHash));

View File

@@ -2,23 +2,28 @@ import { BlsWalletWrapper, ethers } from "../../deps.ts";
import * as env from "../../test/env.ts";
import AdminWallet from "../../src/chain/AdminWallet.ts";
import Range from "../../src/helpers/Range.ts";
import Rng from "../../src/helpers/Rng.ts";
import getNetworkConfig from "../../src/helpers/getNetworkConfig.ts";
export default async function TestBlsWallet(
export default async function TestBlsWallets(
provider: ethers.providers.Provider,
index?: number,
count: number,
) {
const { addresses } = await getNetworkConfig();
const parent = AdminWallet(provider);
const rng = Rng.root.seed(env.PRIVATE_KEY_ADMIN, env.TEST_BLS_WALLETS_SECRET);
const secret = rng.seed(`${index}`).address();
return await BlsWalletWrapper.connect(
secret,
addresses.verificationGateway,
parent.provider,
const wallets = await Promise.all(
Range(count).map(async (i) => {
const secret = rng.seed(`${i}`).address();
return await BlsWalletWrapper.connect(
secret,
addresses.verificationGateway,
parent.provider,
);
}),
);
return wallets;
}

View File

@@ -1,10 +0,0 @@
import { ethers } from "../../deps.ts";
export default async function receiptOf(
responsePromise: Promise<ethers.providers.TransactionResponse>,
): Promise<ethers.providers.TransactionReceipt> {
const response = await responsePromise;
const receipt = await response.wait();
return receipt;
}

View File

@@ -1,103 +1,42 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write --unstable
import {
ActionData,
AggregatorClient,
AggregatorUtilitiesFactory,
BigNumber,
delay,
ethers,
MockERC20Factory,
MockERC20__factory,
} from "../deps.ts";
import AdminWallet from "../src/chain/AdminWallet.ts";
import assert from "../src/helpers/assert.ts";
import getNetworkConfig from "../src/helpers/getNetworkConfig.ts";
import * as env from "../test/env.ts";
import TestBlsWallet from "./helpers/TestBlsWallet.ts";
const [walletIndexStr = "0"] = Deno.args;
const walletIndex = Number(walletIndexStr);
import TestBlsWallets from "./helpers/TestBlsWallets.ts";
const { addresses } = await getNetworkConfig();
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
const testErc20 = MockERC20Factory.connect(addresses.testToken, provider);
const testErc20 = MockERC20__factory.connect(addresses.testToken, provider);
const client = new AggregatorClient(env.ORIGIN);
const wallet = await TestBlsWallet(provider, walletIndex);
const nonce = await wallet.Nonce();
const adminWallet = AdminWallet(provider);
console.log("Funding wallet");
await (await adminWallet.sendTransaction({
to: wallet.address,
value: 1,
})).wait();
const [wallet] = await TestBlsWallets(provider, 1);
const startBalance = await testErc20.balanceOf(wallet.address);
const mintAction: ActionData = {
ethValue: 0,
contractAddress: testErc20.address,
encodedFunction: testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, 1],
),
};
const sendEthToTxOrigin = AggregatorUtilitiesFactory
.createInterface()
.encodeFunctionData("sendEthToTxOrigin");
const feeEstimation = await client.estimateFee(
await wallet.signWithGasEstimate({
nonce,
actions: [
mintAction,
{
ethValue: 1,
contractAddress: addresses.utilities,
encodedFunction: sendEthToTxOrigin,
},
],
}),
);
console.log({ feeEstimation });
assert(feeEstimation.feeType === "ether");
const feeRequired = BigNumber.from(feeEstimation.feeRequired);
// Add 10% safety margin
const fee = feeRequired.add(feeRequired.div(10));
const balance = await provider.getBalance(wallet.address);
// Ensure wallet can pay the fee
if (balance.lt(fee)) {
console.log("Funding wallet");
await (await adminWallet.sendTransaction({
to: wallet.address,
value: fee.sub(balance),
})).wait();
}
const feeAction: ActionData = {
ethValue: fee,
contractAddress: addresses.utilities,
encodedFunction: sendEthToTxOrigin,
};
const bundle = await wallet.signWithGasEstimate({
const bundle = wallet.sign({
nonce: await wallet.Nonce(),
actions: [mintAction, feeAction],
actions: [{
ethValue: 0,
contractAddress: testErc20.address,
encodedFunction: testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, 1],
),
}],
});
// console.log("Calling estimateFee");
// const feeEstimation = await client.estimateFee(bundle);
// console.log({ feeEstimation });
console.log("Sending mint bundle to aggregator");
const res = await client.add(bundle);
@@ -108,7 +47,7 @@ if ("failures" in res) {
console.log("Success response from aggregator", res.hash);
while (true) {
const balance = await testErc20.balanceOf(wallet.address);
const balance = (await testErc20.balanceOf(wallet.address));
console.log({
startBalance: startBalance.toString(),

View File

@@ -1,10 +1,10 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write --unstable
import { delay, ethers, MockERC20Factory } from "../deps.ts";
import { delay, ethers, MockERC20__factory } from "../deps.ts";
import EthereumService from "../src/app/EthereumService.ts";
import * as env from "../test/env.ts";
import TestBlsWallet from "./helpers/TestBlsWallet.ts";
import TestBlsWallets from "./helpers/TestBlsWallets.ts";
import getNetworkConfig from "../src/helpers/getNetworkConfig.ts";
const { addresses } = await getNetworkConfig();
@@ -14,14 +14,16 @@ const ethereumService = await EthereumService.create(
(evt) => {
console.log(evt);
},
addresses.verificationGateway,
addresses.utilities,
env.PRIVATE_KEY_AGG,
);
const testErc20 = MockERC20Factory.connect(addresses.testToken, provider);
const wallet = await TestBlsWallet(provider);
const testErc20 = MockERC20__factory.connect(addresses.testToken, provider);
const [wallet] = await TestBlsWallets(provider, 1);
const startBalance = await testErc20.balanceOf(wallet.address);
const bundle = await wallet.signWithGasEstimate({
const bundle = wallet.sign({
nonce: await wallet.Nonce(),
actions: [{
ethValue: 0,
@@ -45,7 +47,7 @@ console.log("Sending via ethereumService");
})();
while (true) {
const balance = await testErc20.balanceOf(wallet.address);
const balance = (await testErc20.balanceOf(wallet.address));
console.log({
startBalance: startBalance.toString(),

View File

@@ -1,144 +0,0 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
import {
ActionData,
AggregatorClient,
AggregatorUtilitiesFactory,
BigNumber,
Bundle,
delay,
ethers,
MockERC20Factory,
} from "../deps.ts";
import AdminWallet from "../src/chain/AdminWallet.ts";
import assert from "../src/helpers/assert.ts";
import getNetworkConfig from "../src/helpers/getNetworkConfig.ts";
import Range from "../src/helpers/Range.ts";
import * as env from "../test/env.ts";
import TestBlsWallet from "./helpers/TestBlsWallet.ts";
const [walletNStr] = Deno.args;
const walletN = Number(walletNStr);
if (!Number.isFinite(walletN)) {
console.error("Usage: ./manualTests/mintNViaAggregator.ts <N>");
Deno.exit(1);
}
const { addresses } = await getNetworkConfig();
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
const testErc20 = MockERC20Factory.connect(addresses.testToken, provider);
const client = new AggregatorClient(env.ORIGIN);
const sendEthToTxOrigin = AggregatorUtilitiesFactory
.createInterface()
.encodeFunctionData("sendEthToTxOrigin");
const adminWallet = AdminWallet(provider);
const wallets = await Promise.all(
Range(walletN).map((i) => TestBlsWallet(provider, i)),
);
const firstWallet = wallets[0];
const mintAction: ActionData = {
ethValue: 0,
contractAddress: testErc20.address,
encodedFunction: testErc20.interface.encodeFunctionData(
"mint",
[wallets[0].address, 1],
),
};
const startBalance = await testErc20.balanceOf(firstWallet.address);
const bundles: Bundle[] = [];
for (const [i, wallet] of wallets.entries()) {
const nonce = await wallet.Nonce();
console.log("Funding wallet", i, "(1 wei to make estimateFee work)");
await (await adminWallet.sendTransaction({
to: wallet.address,
value: 1,
})).wait();
const feeEstimation = await client.estimateFee(
await wallet.signWithGasEstimate({
nonce,
actions: [
mintAction,
{
ethValue: 1,
contractAddress: addresses.utilities,
encodedFunction: sendEthToTxOrigin,
},
],
}),
);
assert(feeEstimation.feeType === "ether");
const feeRequired = BigNumber.from(feeEstimation.feeRequired);
// Add 10% safety margin
const fee = feeRequired.add(feeRequired.div(10));
const balance = await provider.getBalance(wallet.address);
// Ensure wallet can pay the fee
if (balance.lt(fee)) {
console.log("Funding wallet", i, "(based on estimateFee)");
await (await adminWallet.sendTransaction({
to: wallet.address,
value: fee.sub(balance),
})).wait();
}
const feeAction: ActionData = {
ethValue: fee,
contractAddress: addresses.utilities,
encodedFunction: sendEthToTxOrigin,
};
bundles.push(
await wallet.signWithGasEstimate({
nonce,
actions: [mintAction, feeAction],
}),
);
}
console.log("Sending mint bundles to aggregator");
await Promise.all(bundles.map(async (bundle) => {
const res = await client.add(bundle);
if ("failures" in res) {
throw new Error(res.failures.map((f) => f.description).join(", "));
}
console.log("Success response from aggregator", res.hash);
}));
while (true) {
const balance = await testErc20.balanceOf(firstWallet.address);
console.log({
startBalance: startBalance.toString(),
balance: balance.toString(),
});
if (balance.sub(startBalance).gte(walletN)) {
console.log("done");
break;
}
console.log("Mints not completed, waiting 500ms");
await delay(500);
}

View File

@@ -1,34 +0,0 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read
import { ContractsConnector, ethers } from "../deps.ts";
import * as env from "../src/env.ts";
import AdminWallet from "../src/chain/AdminWallet.ts";
import receiptOf from "./helpers/receiptOf.ts";
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
const adminWallet = AdminWallet(provider);
const connector = await ContractsConnector.create(adminWallet);
const addressRegistry = await connector.AddressRegistry();
const blsPublicKeyRegistry = await connector.BLSPublicKeyRegistry();
await receiptOf(
addressRegistry.register("0xCB1ca1e8DF1055636d7D07c3099c9de3c65CAAB4"),
);
await receiptOf(
blsPublicKeyRegistry.register(
// You can get this in Quill by running this in the console of the wallet
// page (the page you get by clicking on the extension icon)
// JSON.stringify(debug.wallets[0].blsWalletSigner.getPublicKey())
[
"0x0ad7e63a4bbfdad440beda1fe7fdfb77a59f2a6d991700c6cf4c3654a52389a9",
"0x0adaa93bdfda0f6b259a80c1af7ccf3451c35c1e175483927a8052bdbf59f801",
"0x1f56aa1bb1419c741f0a474e51f33da0ffc81ea870e2e2c440db72539a9efb9e",
"0x2f1f7e5d586d6ca5de3c8c198c3be3b998a2b6df7ee8a367a1e58f8b36fd524d",
],
),
);

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read
#!/usr/bin/env -S deno run --unstable --allow-net --allow-env --allow-read
import { ethers } from "../deps.ts";
import * as env from "../src/env.ts";

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write --unstable
import {
AggregatorClient,
@@ -6,14 +6,13 @@ import {
BlsWalletWrapper,
delay,
ethers,
MockERC20Factory,
MockERC20__factory,
} from "../deps.ts";
import * as env from "../test/env.ts";
import AdminWallet from "../src/chain/AdminWallet.ts";
import TestBlsWallet from "./helpers/TestBlsWallet.ts";
import TestBlsWallets from "./helpers/TestBlsWallets.ts";
import getNetworkConfig from "../src/helpers/getNetworkConfig.ts";
import Range from "../src/helpers/Range.ts";
const logStartTime = Date.now();
@@ -27,14 +26,7 @@ function log(...args: unknown[]) {
console.log(RelativeTimestamp(), ...args);
}
// Note: This value is a guess and may require some experimentation for optimal
// throughput. The size of a full aggregation used to be hardcoded in config,
// but now that we use gas to limit the bundle size we don't know this value
// upfront anymore.
const fullAggregationSize = 100;
const leadTarget = fullAggregationSize * env.MAX_UNCONFIRMED_AGGREGATIONS;
const leadTarget = env.MAX_AGGREGATION_SIZE * env.MAX_UNCONFIRMED_AGGREGATIONS;
const pollingInterval = 400;
const sendWalletCount = 50;
@@ -43,20 +35,21 @@ const { addresses } = await getNetworkConfig();
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
const adminWallet = AdminWallet(provider);
const testErc20 = MockERC20Factory.connect(addresses.testToken, provider);
const testErc20 = MockERC20__factory.connect(addresses.testToken, provider);
const client = new AggregatorClient(env.ORIGIN);
log("Connecting/creating test wallets...");
const [recvWallet, ...sendWallets] = await Promise.all(
Range(sendWalletCount + 1).map((i) => TestBlsWallet(provider, i)),
const [recvWallet, ...sendWallets] = await TestBlsWallets(
provider,
sendWalletCount + 1,
);
log("Checking/minting test tokens...");
for (const wallet of sendWallets) {
const testErc20 = MockERC20Factory.connect(
const testErc20 = MockERC20__factory.connect(
addresses.testToken,
adminWallet,
);
@@ -90,7 +83,7 @@ let txsAdded = 0;
let txsCompleted = 0;
let sendWalletIndex = 0;
pollingLoop(async () => {
pollingLoop(() => {
// Send transactions
const lead = txsSent - txsCompleted;
@@ -102,7 +95,7 @@ pollingLoop(async () => {
const nonce = nextNonceMap.get(sendWallet)!;
nextNonceMap.set(sendWallet, nonce.add(1));
const bundle = await sendWallet.signWithGasEstimate({
const bundle = sendWallet.sign({
nonce,
actions: [{
ethValue: 0,

View File

@@ -1,56 +0,0 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
import { AggregatorClient, ethers, MockERC20Factory } from "../deps.ts";
// import EthereumService from "../src/app/EthereumService.ts";
import * as env from "../test/env.ts";
import TestBlsWallet from "./helpers/TestBlsWallet.ts";
import getNetworkConfig from "../src/helpers/getNetworkConfig.ts";
const { addresses } = await getNetworkConfig();
const client = new AggregatorClient(env.ORIGIN);
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
// const ethereumService = await EthereumService.create(
// (evt) => {
// console.log(evt);
// },
// env.PRIVATE_KEY_AGG,
// );
const testErc20 = MockERC20Factory.connect(addresses.testToken, provider);
const wallet = await TestBlsWallet(provider);
const bundle = await wallet.signWithGasEstimate({
nonce: await wallet.Nonce(),
actions: [{
ethValue: 0,
contractAddress: testErc20.address,
encodedFunction: testErc20.interface.encodeFunctionData(
"transferFrom",
[
"0x0000000000000000000000000000000000000000",
wallet.address,
ethers.BigNumber.from(
"0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
),
],
),
}],
});
console.log("Sending via ethereumService or agg");
(async () => {
try {
// Test directly with ethereum service
// await ethereumService.submitBundle(bundle);
// test by submitting request to the agg
const res = await client.add(bundle);
console.log(res);
} catch (error) {
console.error(error.stack);
Deno.exit(1);
}
})();

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --allow-write
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read --unstable
import app from "../src/app/app.ts";
import AppEvent from "../src/app/AppEvent.ts";

View File

@@ -1,42 +1,24 @@
#!/usr/bin/env -S deno run --allow-run --allow-read --allow-write --allow-env
#!/usr/bin/env -S deno run --unstable --allow-run --allow-read --allow-write
import { dirname, parseArgs } from "../deps.ts";
import * as shell from "./helpers/shell.ts";
import repoDir from "../src/helpers/repoDir.ts";
import dotEnvPath, { envName } from "../src/helpers/dotEnvPath.ts";
import nil from "../src/helpers/nil.ts";
const parseArgsResult = parseArgs(Deno.args);
const args = {
/** Whether to push the image to dockerhub. */
push: parseArgsResult["push"],
/** Override the image name. Default: aggregator. */
imageName: parseArgsResult["image-name"],
/** Only build the image, ie - don't also serialize the image to disk. */
imageOnly: parseArgsResult["image-only"],
/** Prefix all docker commands with sudo. */
sudoDocker: parseArgsResult["sudo-docker"],
/** Tag the image with latest as well as the default git-${sha}. */
alsoTagLatest: parseArgsResult["also-tag-latest"],
};
const args = parseArgs(Deno.args);
Deno.chdir(repoDir);
const buildDir = `${repoDir}/build`;
await ensureFreshBuildDir();
await buildEnvironment();
await copyTypescriptFiles();
await buildDockerImage();
await tarballTypescriptFiles();
if (args.push) {
await pushDockerImage();
}
console.log("\nAggregator build complete");
console.log("Aggregator build complete");
async function allFiles() {
return [
@@ -50,7 +32,13 @@ async function allFiles() {
];
}
async function Tag() {
async function shortContentHash(filePath: string) {
const contentHash = (await shell.Line("shasum", "-a", "256", filePath));
return contentHash.slice(0, 7);
}
async function BuildName() {
const commitShort = (await shell.Line("git", "rev-parse", "HEAD")).slice(
0,
7,
@@ -59,10 +47,15 @@ async function Tag() {
const isDirty =
(await shell.Lines("git", "status", "--porcelain")).length > 0;
const envHashShort = await shortContentHash(`${buildDir}/.env`);
return [
"git",
commitShort,
...(isDirty ? ["dirty"] : []),
"env",
envName,
envHashShort,
].join("-");
}
@@ -81,6 +74,45 @@ async function ensureFreshBuildDir() {
await Deno.mkdir(buildDir);
}
async function buildEnvironment() {
const repoDotEnv = await Deno.readTextFile(dotEnvPath);
let networkConfigPaths: { repo: string; build: string } | nil = nil;
const buildDotEnvLines: string[] = [];
for (const line of repoDotEnv.split("\n")) {
let buildLine = line;
if (line.startsWith("NETWORK_CONFIG_PATH=")) {
const repoNetworkConfigPath = line.slice(
"NETWORK_CONFIG_PATH=".length,
);
const networkConfigHash = await shortContentHash(repoNetworkConfigPath);
networkConfigPaths = {
repo: repoNetworkConfigPath,
build: `networkConfig-${networkConfigHash}.json`,
};
// Need to replace this value with a build location because otherwise
// this file might not be included in the docker image
buildLine = `NETWORK_CONFIG_PATH=${networkConfigPaths.build}`;
}
buildDotEnvLines.push(buildLine);
}
if (networkConfigPaths !== nil) {
await Deno.copyFile(
networkConfigPaths.repo,
`${buildDir}/${networkConfigPaths.build}`,
);
}
await Deno.writeTextFile(`${buildDir}/.env`, buildDotEnvLines.join("\n"));
}
async function copyTypescriptFiles() {
for (const f of await allFiles()) {
if (!f.endsWith(".ts")) {
@@ -106,11 +138,9 @@ async function tarballTypescriptFiles() {
}
async function buildDockerImage() {
const tag = await Tag();
const imageName = args.imageName ?? "aggregator";
const imageNameAndTag = `${imageName}:${tag}`;
const buildName = await BuildName();
const sudoDockerArg = args.sudoDocker ? ["sudo"] : [];
const sudoDockerArg = args["sudo-docker"] === true ? ["sudo"] : [];
await shell.run(
...sudoDockerArg,
@@ -118,35 +148,18 @@ async function buildDockerImage() {
"build",
repoDir,
"-t",
imageNameAndTag,
`aggregator:${buildName}`,
);
if (args.alsoTagLatest) {
await shell.run(
...sudoDockerArg,
"docker",
"tag",
`${imageName}:${tag}`,
`${imageName}:latest`,
);
}
console.log("\nDocker image created:", imageNameAndTag);
if (args.imageName) {
return;
}
const dockerImageFileName = `${imageName}-${tag}-docker-image`;
const tarFilePath = `${repoDir}/build/${dockerImageFileName}.tar`;
const dockerImageName = `aggregator-${buildName}-docker-image`;
await shell.run(
...sudoDockerArg,
"docker",
"save",
"--output",
tarFilePath,
imageNameAndTag,
`${repoDir}/build/${dockerImageName}.tar`,
`aggregator:${buildName}`,
);
if (sudoDockerArg.length > 0) {
@@ -157,23 +170,12 @@ async function buildDockerImage() {
"sudo",
"chown",
username,
tarFilePath,
`${repoDir}/build/${dockerImageName}.tar`,
);
}
await shell.run("gzip", tarFilePath);
console.log(`Docker image saved: ${tarFilePath}.gz`);
}
async function pushDockerImage() {
const tag = await Tag();
const imageName = args.imageName ?? "aggregator";
const imageNameAndTag = `${imageName}:${tag}`;
await shell.run("docker", "push", imageNameAndTag);
if (args.alsoTagLatest) {
await shell.run("docker", "push", `${imageName}:latest`);
}
await shell.run(
"gzip",
`${repoDir}/build/${dockerImageName}.tar`,
);
}
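Tag()/BuildName() above compose the image tag from the short git sha, an optional dirty marker, and the env name plus a short hash of the built .env. Assuming a clean checkout at commit abc1234 and an env file hashing to def5678 (both made up), the result looks like this:

// Sketch of the tag composition; the sha, env name, and hashes are made up.
const commitShort = "abc1234"; // first 7 chars of `git rev-parse HEAD`
const isDirty = false; // `git status --porcelain` was empty
const envName = "local"; // from dotEnvPath's envName
const envHashShort = "def5678"; // first 7 chars of the shasum of build/.env

const tag = [
  "git",
  commitShort,
  ...(isDirty ? ["dirty"] : []),
  "env",
  envName,
  envHashShort,
].join("-");

console.log(`aggregator:${tag}`); // aggregator:git-abc1234-env-local-def5678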

View File

@@ -1,5 +0,0 @@
#!/usr/bin/env -S deno run --allow-run --allow-read --allow-write --allow-env
import { checkTs } from "./helpers/typescript.ts";
await checkTs();

View File

@@ -16,9 +16,10 @@ deno test \
--allow-env \
--allow-read \
--coverage=cov_profile \
--unstable \
test/*.test.ts
deno coverage cov_profile --lcov >cov_profile/profile.lcov
deno coverage cov_profile --unstable --lcov >cov_profile/profile.lcov
genhtml -o cov_profile/html cov_profile/profile.lcov

View File

@@ -1,43 +0,0 @@
#!/usr/bin/env -S deno run --allow-net --allow-env --allow-read
import {
BlsWalletWrapper,
ethers,
VerificationGatewayFactory,
Wallet,
} from "../deps.ts";
import * as env from "../src/env.ts";
import getNetworkConfig from "../src/helpers/getNetworkConfig.ts";
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
const wallet = new Wallet(env.PRIVATE_KEY_AGG, provider);
const { addresses } = await getNetworkConfig();
const vg = VerificationGatewayFactory.connect(
addresses.verificationGateway,
wallet,
);
const internalBlsWallet = await BlsWalletWrapper.connect(
env.PRIVATE_KEY_AGG,
addresses.verificationGateway,
provider,
);
console.log("Connected internal wallet:", internalBlsWallet.address);
const nonce = await internalBlsWallet.Nonce();
if (!nonce.eq(0)) {
console.log("Already exists with nonce", nonce.toNumber());
} else {
await (await vg.processBundle(
await internalBlsWallet.signWithGasEstimate({
nonce: 0,
actions: [],
}),
)).wait();
console.log("Created successfully");
}

View File

@@ -0,0 +1,15 @@
#!/usr/bin/env -S deno run --unstable --allow-net --allow-read --allow-env
// Useful for when breaking database changes are made.
import createQueryClient from "../src/app/createQueryClient.ts";
import * as env from "../src/env.ts";
import BundleTable from "../src/app/BundleTable.ts";
const queryClient = createQueryClient(() => {});
for (const tableName of [env.BUNDLE_TABLE_NAME]) {
const table = await BundleTable.create(queryClient, tableName);
await table.drop();
console.log(`dropped table ${tableName}`);
}

View File

@@ -1,13 +0,0 @@
import * as shell from "./shell.ts";
export async function allFiles() {
return [
...await shell.Lines("git", "ls-files"),
...await shell.Lines(
"git",
"ls-files",
"--others",
"--exclude-standard",
),
];
}

View File

@@ -1,22 +0,0 @@
import * as shell from "./shell.ts";
import { allFiles } from "./git.ts";
// TODO (merge-ok) Consider turning this into a standard eslint rule
export async function lintTodosFixmes(): Promise<void> { // merge-ok
const searchArgs = [
"egrep",
"--color",
"-ni",
"todo|fixme", // merge-ok
...(await allFiles()),
];
const matches = await shell.Lines(...searchArgs);
const notOkMatches = matches.filter((m) => !m.includes("merge-ok"));
if (notOkMatches.length > 0) {
console.error(notOkMatches.join("\n"));
throw new Error(`${notOkMatches.length} todos/fixmes found`); // merge-ok
}
}

View File

@@ -1,6 +1,4 @@
export async function run(...cmd: string[]): Promise<void> {
// https://github.com/web3well/bls-wallet/issues/595
// deno-lint-ignore no-deprecated-deno-api
const process = Deno.run({ cmd, stdout: "inherit", stderr: "inherit" });
const unloadListener = () => {
@@ -22,8 +20,6 @@ export async function run(...cmd: string[]): Promise<void> {
}
export async function String(...cmd: string[]): Promise<string> {
// https://github.com/web3well/bls-wallet/issues/595
// deno-lint-ignore no-deprecated-deno-api
const process = Deno.run({ cmd, stdout: "piped" });
if (process.stdout === null) {

View File

@@ -1,25 +0,0 @@
import { allFiles } from "./git.ts";
import * as shell from "./shell.ts";
import nil from "../../src/helpers/nil.ts";
import repoDir from "../../src/helpers/repoDir.ts";
export async function checkTs(): Promise<void> {
let testFilePath: string | nil = nil;
try {
const tsFiles = (await allFiles()).filter((f) => f.endsWith(".ts"));
testFilePath = await Deno.makeTempFile({ suffix: ".ts" });
await Deno.writeTextFile(
testFilePath,
tsFiles.map((f) => `import "${repoDir}/${f}";`).join("\n"),
);
await shell.run("deno", "check", testFilePath);
} finally {
if (testFilePath !== nil) {
await Deno.remove(testFilePath);
}
}
}

View File

@@ -1,7 +0,0 @@
#!/usr/bin/env -S deno run --allow-run --allow-read --allow-env
// TODO (merge-ok) Consider turning this into a standard eslint rule
import { lintTodosFixmes } from "./helpers/lint.ts"; // merge-ok
await lintTodosFixmes(); // merge-ok

View File

@@ -1,9 +1,8 @@
#!/usr/bin/env -S deno run --allow-run --allow-read --allow-write --allow-env
#!/usr/bin/env -S deno run --unstable --allow-run --allow-read --allow-write --allow-env
import { lintTodosFixmes } from "./helpers/lint.ts"; // merge-ok
import { checkTs } from "./helpers/typescript.ts";
import * as shell from "./helpers/shell.ts";
import repoDir from "../src/helpers/repoDir.ts";
import nil from "../src/helpers/nil.ts";
import { envName } from "../src/helpers/dotEnvPath.ts";
Deno.chdir(repoDir);
@@ -28,8 +27,44 @@ function Checks(): Check[] {
["lint", async () => {
await shell.run("deno", "lint", ".");
}],
["todos and fixmes", lintTodosFixmes], // merge-ok
["typescript", checkTs],
["todos and fixmes", async () => { // merge-ok
const searchArgs = [
"egrep",
"--color",
"-ni",
"todo|fixme", // merge-ok
...(await allFiles()),
];
const matches = await shell.Lines(...searchArgs);
const notOkMatches = matches.filter((m) => !m.includes("merge-ok"));
if (notOkMatches.length > 0) {
console.error(notOkMatches.join("\n"));
throw new Error(`${notOkMatches.length} todos/fixmes found`); // merge-ok
}
}],
["typescript", async () => {
let testFilePath: string | nil = nil;
try {
const tsFiles = (await allFiles()).filter((f) => f.endsWith(".ts"));
testFilePath = await Deno.makeTempFile({ suffix: ".ts" });
await Deno.writeTextFile(
testFilePath,
tsFiles.map((f) => `import "${repoDir}/${f}";`).join("\n"),
);
await shell.run("deno", "cache", "--unstable", testFilePath);
} finally {
if (testFilePath !== nil) {
await Deno.remove(testFilePath);
}
}
}],
["test", async () => {
await shell.run(
"deno",
@@ -42,6 +77,7 @@ function Checks(): Check[] {
"--allow-net",
"--allow-env",
"--allow-read",
"--unstable",
"--",
"--env",
envName,
@@ -49,3 +85,15 @@ function Checks(): Check[] {
}],
];
}
async function allFiles() {
return [
...await shell.Lines("git", "ls-files"),
...await shell.Lines(
"git",
"ls-files",
"--others",
"--exclude-standard",
),
];
}

View File

@@ -1,14 +1,19 @@
#!/usr/bin/env -S deno run --allow-net --allow-read --allow-write --allow-env
#!/usr/bin/env -S deno run --unstable --allow-net --allow-read --unstable
import { BigNumber, sqlite } from "../deps.ts";
import { BigNumber } from "../deps.ts";
import createQueryClient from "../src/app/createQueryClient.ts";
import * as env from "../src/env.ts";
import BundleTable from "../src/app/BundleTable.ts";
const table = new BundleTable(new sqlite.DB(env.DB_PATH));
const queryClient = createQueryClient(() => {});
console.log(table.count());
console.log(table.all().map((bun) => bun.id));
console.log(
"findEligible",
table.findEligible(BigNumber.from(0), 1000).map((bun) => bun.id)
);
for (const tableName of [env.BUNDLE_TABLE_NAME]) {
const table = await BundleTable.create(queryClient, tableName);
console.log(tableName, await table.count());
console.log(tableName, (await table.all()).map((bun) => bun.id));
console.log(
tableName,
"findEligible",
(await table.findEligible(BigNumber.from(0), 1000)).map((bun) => bun.id),
);
}

View File

@@ -1,43 +0,0 @@
#!/bin/bash
set -euo pipefail
if [ -z ${VERSION+x} ]; then
>&2 echo "Missing VERSION. Needs to match the first 7 characters of the git sha used to build the docker image."
>&2 echo "Usage: VERSION=abc1234 start-docker.sh"
exit 1
fi
ENV_PATH="${ENV_PATH:=.env}"
# Normalize ENV_PATH to an absolute path
if [[ $(echo $ENV_PATH | head -c1) != "/" ]]; then
ENV_PATH="$(cd $(dirname $ENV_PATH) && pwd)/$(basename $ENV_PATH)"
fi
echo "Using env" $ENV_PATH
PORT=$(cat $ENV_PATH | grep '^PORT=' | tail -n1 | sed 's/^PORT=//')
NETWORK_CONFIG_PATH=$(cat $ENV_PATH | grep '^NETWORK_CONFIG_PATH=' | tail -n1 | sed 's/^NETWORK_CONFIG_PATH=//')
# Normalize NETWORK_CONFIG_PATH to an absolute path
if [[ $(echo $NETWORK_CONFIG_PATH | head -c1) != "/" ]]; then
NETWORK_CONFIG_PATH="$(cd $(dirname $ENV_PATH) && cd $(dirname $NETWORK_CONFIG_PATH) && pwd)/$(basename $NETWORK_CONFIG_PATH)"
fi
echo "Using network config" $NETWORK_CONFIG_PATH
NETWORK=$(basename $NETWORK_CONFIG_PATH .json)
CONTAINER_NAME="aggregator-$VERSION-$NETWORK"
IMAGE_NAME="aggregator:git-$VERSION"
echo "Creating $CONTAINER_NAME using $IMAGE_NAME"
docker run \
--name "$CONTAINER_NAME" \
-d \
--net=host \
--restart=unless-stopped \
--mount type=bind,source="$ENV_PATH",target=/app/.env \
--mount type=bind,source="$NETWORK_CONFIG_PATH",target=/app/networkConfig.json \
"$IMAGE_NAME"

View File

@@ -5,15 +5,15 @@ import AdminService from "./AdminService.ts";
export default function AdminRouter(adminService: AdminService) {
const router = new Router({ prefix: "/admin/" });
router.get("countTxs", (ctx) => {
const c = adminService.bundleCount();
router.get("countTxs", async (ctx) => {
const c = await adminService.bundleCount();
console.log(`Returning count ${c}\n`);
ctx.response.headers.set("Content-Type", "application/json");
ctx.response.body = c;
});
router.get("resetTxs", (ctx) => {
adminService.resetBundles();
router.get("resetTxs", async (ctx) => {
await adminService.resetBundles();
ctx.response.body = "Transactions reset";
});

View File

@@ -7,11 +7,11 @@ export default class AdminService {
private bundleTable: BundleTable,
) {}
resetBundles() {
this.bundleTable.clear();
async resetBundles() {
await this.bundleTable.clear();
}
bundleCount(): number {
return this.bundleTable.count();
async bundleCount(): Promise<bigint> {
return await this.bundleTable.count();
}
}

File diff suppressed because it is too large

View File

@@ -4,8 +4,6 @@ import BundleHandler from "./helpers/BundleHandler.ts";
import AggregationStrategy from "./AggregationStrategy.ts";
import AsyncReturnType from "../helpers/AsyncReturnType.ts";
import ClientReportableError from "./helpers/ClientReportableError.ts";
import nil from "../helpers/nil.ts";
import never from "./helpers/never.ts";
export default function AggregationStrategyRouter(
aggregationStrategy: AggregationStrategy,
@@ -30,19 +28,7 @@ export default function AggregationStrategyRouter(
}
ctx.response.body = {
feeType: (() => {
const feesConfig = aggregationStrategy.config.fees;
if (feesConfig === nil || feesConfig.type === "ether") {
return "ether";
}
if (feesConfig.type === "token") {
return `token:${feesConfig.address}`;
}
never(feesConfig);
})(),
feeType: aggregationStrategy.config.fees.type,
feeDetected: result.feeDetected.toString(),
feeRequired: result.feeRequired.toString(),
successes: result.successes,

View File

@@ -1,57 +1,12 @@
import { HTTPMethods } from "../../deps.ts";
type AppEvent =
| { type: "starting" }
type AppEvent = (
| { type: "listening"; data: { port: number } }
| { type: "db-query"; data: { sql: string; params: unknown } }
| { type: "db-query"; data: { sql: string; params: unknown[] } }
| { type: "waiting-unconfirmed-space" }
| {
type: "running-strategy";
data: {
eligibleRows: number;
};
}
| {
type: "completed-strategy";
data: {
includedRows: number;
bundleOverheadCost: string;
bundleOverheadLen: number;
expectedFee: string;
expectedMaxCost: string;
};
}
| {
type: "failed-row";
data: {
publicKeyShorts: string[];
submitError?: string;
};
}
| {
type: "aggregate-bundle-unprofitable";
data: {
reason?: string;
};
}
| {
type: "aggregate-bundle-exceeds-max-gas";
data: {
hash: string;
gasEstimate: number;
aggregateGasEstimate: number;
maxGasPerBundle: number;
};
}
| { type: "unprofitable-despite-breakeven-operations" }
| {
type: "submission-attempt";
data: {
publicKeyShorts: string[];
attemptNumber: number;
txLen: number;
compressedTxLen: number;
};
data: { publicKeyShorts: string[]; attemptNumber: number };
}
| {
type: "submission-attempt-failed";
@@ -64,16 +19,7 @@ type AppEvent =
| { type: "submission-sent"; data: { hash: string } }
| {
type: "submission-confirmed";
data: {
hash: string;
bundleHashes: string[];
blockNumber: number;
profit: string;
cost: string;
expectedMaxCost: string;
actualFee: string;
expectedFee: string;
};
data: { hash: string; bundleHashes: string[], blockNumber: number };
}
| { type: "warning"; data: string }
| {
@@ -102,6 +48,7 @@ type AppEvent =
status: number;
duration: number;
};
};
}
);
export default AppEvent;

View File

@@ -2,6 +2,7 @@ import { Router } from "../../deps.ts";
import failRequest from "./helpers/failRequest.ts";
import BundleHandler from "./helpers/BundleHandler.ts";
import nil from "../helpers/nil.ts";
import BundleService from "./BundleService.ts";
export default function BundleRouter(bundleService: BundleService) {
@@ -22,34 +23,15 @@ export default function BundleRouter(bundleService: BundleService) {
router.get(
"bundleReceipt/:hash",
(ctx) => {
const bundleRow = bundleService.lookupBundle(ctx.params.hash!);
async (ctx) => {
const receipt = await bundleService.lookupReceipt(ctx.params.hash!);
if (bundleRow?.receipt === nil) {
ctx.response.status = 404;
ctx.response.body = {
submitError: bundleRow?.submitError,
};
return;
}
ctx.response.body = bundleService.receiptFromBundle(bundleRow);
},
);
router.get(
"aggregateBundle/:subBundleHash",
(ctx) => {
const bundleRows = bundleService.lookupAggregateBundle(ctx.params.subBundleHash!);
if (bundleRows === nil || !bundleRows?.length) {
if (receipt === nil) {
ctx.response.status = 404;
return;
}
ctx.response.body = bundleRows;
ctx.response.body = receipt;
},
);
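One way a client might consume the bundleReceipt route above is to poll until a receipt appears, treating a 404 whose body carries submitError (the variant of the route that returns it) as a permanent failure. The origin value and one-second interval here are assumptions for illustration:

// Sketch only; assumes the aggregator is reachable at `origin`.
async function waitForReceipt(origin: string, bundleHash: string) {
  while (true) {
    const res = await fetch(`${origin}/bundleReceipt/${bundleHash}`);

    if (res.status === 200) {
      return await res.json(); // the receipt
    }

    const body = await res.json().catch(() => ({}));

    if (body.submitError) {
      throw new Error(`bundle failed: ${body.submitError}`);
    }

    // Not included yet; try again shortly.
    await new Promise((resolve) => setTimeout(resolve, 1000));
  }
}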

View File

@@ -1,12 +1,10 @@
import {
BigNumber,
BlsWalletSigner,
BlsWalletWrapper,
Bundle,
delay,
ethers,
Semaphore,
VerificationGatewayFactory,
QueryClient,
} from "../../deps.ts";
import { IClock } from "../helpers/Clock.ts";
@@ -19,32 +17,33 @@ import * as env from "../env.ts";
import runQueryGroup from "./runQueryGroup.ts";
import EthereumService from "./EthereumService.ts";
import AppEvent from "./AppEvent.ts";
import BundleTable, { BundleRow } from "./BundleTable.ts";
import BundleTable, { BundleRow, makeHash } from "./BundleTable.ts";
import countActions from "./helpers/countActions.ts";
import plus from "./helpers/plus.ts";
import AggregationStrategy from "./AggregationStrategy.ts";
import nil from "../helpers/nil.ts";
import getOptimismL1Fee from "../helpers/getOptimismL1Fee.ts";
import ExplicitAny from "../helpers/ExplicitAny.ts";
export type AddBundleResponse = { hash: string } | {
failures: TransactionFailure[];
};
export type AddBundleResponse = { hash: string } | { failures: TransactionFailure[] };
export default class BundleService {
static defaultConfig = {
bundleQueryLimit: env.BUNDLE_QUERY_LIMIT,
breakevenOperationCount: env.BREAKEVEN_OPERATION_COUNT,
maxAggregationSize: env.MAX_AGGREGATION_SIZE,
maxAggregationDelayMillis: env.MAX_AGGREGATION_DELAY_MILLIS,
maxUnconfirmedAggregations: env.MAX_UNCONFIRMED_AGGREGATIONS,
maxEligibilityDelay: env.MAX_ELIGIBILITY_DELAY,
isOptimism: env.IS_OPTIMISM,
};
unconfirmedBundles = new Set<Bundle>();
unconfirmedActionCount = 0;
unconfirmedRowIds = new Set<number>();
submissionSemaphore: Semaphore;
// TODO (merge-ok) use database table in the future to persist
confirmedBundles = new Map<string, {
bundle: Bundle,
receipt: ethers.ContractReceipt,
}>();
submissionTimer: SubmissionTimer;
submissionsInProgress = 0;
@@ -55,6 +54,7 @@ export default class BundleService {
constructor(
public emit: (evt: AppEvent) => void,
public clock: IClock,
public queryClient: QueryClient,
public bundleTableMutex: Mutex,
public bundleTable: BundleTable,
public blsWalletSigner: BlsWalletSigner,
@@ -62,24 +62,25 @@ export default class BundleService {
public aggregationStrategy: AggregationStrategy,
public config = BundleService.defaultConfig,
) {
this.submissionSemaphore = new Semaphore(config.maxUnconfirmedAggregations);
this.submissionTimer = new SubmissionTimer(
clock,
config.maxAggregationDelayMillis,
() => this.runSubmission(),
);
this.ethereumService.provider.on("block", this.handleBlock);
}
(async () => {
await delay(100);
handleBlock = () => {
this.addTask(() => this.tryAggregating());
};
while (!this.stopping) {
this.tryAggregating();
// TODO (merge-ok): Stop if there aren't any bundles?
await this.ethereumService.waitForNextBlock();
}
})();
}
async stop() {
this.stopping = true;
this.ethereumService.provider.off("block", this.handleBlock);
await Promise.all(Array.from(this.pendingTaskPromises));
this.stopped = true;
}
@@ -107,19 +108,19 @@ export default class BundleService {
return;
}
const eligibleRows = this.bundleTable.findEligible(
const eligibleRows = await this.bundleTable.findEligible(
await this.ethereumService.BlockNumber(),
this.config.bundleQueryLimit,
);
const opCount = eligibleRows
const actionCount = eligibleRows
.filter((r) => !this.unconfirmedRowIds.has(r.id))
.map((r) => r.bundle.operations.length)
.map((r) => countActions(r.bundle))
.reduce(plus, 0);
if (opCount >= this.config.breakevenOperationCount) {
if (actionCount >= this.config.maxAggregationSize) {
this.submissionTimer.trigger();
} else if (opCount > 0) {
} else if (actionCount > 0) {
this.submissionTimer.notifyActive();
} else {
this.submissionTimer.clear();
@@ -129,8 +130,8 @@ export default class BundleService {
runQueryGroup<T>(body: () => Promise<T>): Promise<T> {
return runQueryGroup(
this.emit,
(sql) => this.bundleTable.dbQuery(sql),
this.bundleTableMutex,
this.queryClient,
body,
);
}
@@ -150,25 +151,14 @@ export default class BundleService {
};
}
const walletAddresses = await Promise.all(bundle.senderPublicKeys.map(
(pubKey) =>
BlsWalletWrapper.AddressFromPublicKey(
pubKey,
this.ethereumService.verificationGateway,
),
));
const signedCorrectly = this.blsWalletSigner.verify(bundle);
const failures: TransactionFailure[] = [];
const signedCorrectly = this.blsWalletSigner.verify(
bundle,
walletAddresses,
);
if (!signedCorrectly) {
if (signedCorrectly === false) {
failures.push({
type: "invalid-signature",
description:
`invalid bundle signature for signature ${bundle.signature}`,
description: "invalid signature",
});
}
@@ -179,10 +169,9 @@ export default class BundleService {
}
return await this.runQueryGroup(async () => {
const hash = await this.hashBundle(bundle);
const hash = makeHash();
this.bundleTable.add({
status: "pending",
await this.bundleTable.add({
hash,
bundle,
eligibleAfter: await this.ethereumService.BlockNumber(),
@@ -203,90 +192,31 @@ export default class BundleService {
});
}
lookupBundle(hash: string) {
return this.bundleTable.findBundle(hash);
}
// TODO (merge-ok) Remove lint ignore when this hits db
// deno-lint-ignore require-await
async lookupReceipt(hash: string) {
const confirmation = this.confirmedBundles.get(hash);
lookupAggregateBundle(subBundleHash: string) {
const subBundle = this.bundleTable.findBundle(subBundleHash);
return this.bundleTable.findAggregateBundle(subBundle?.aggregateHash!);
}
receiptFromBundle(bundle: BundleRow) {
if (!bundle.receipt) {
if (!confirmation) {
return nil;
}
const { receipt, hash, aggregateHash } = bundle;
const receipt = confirmation.receipt;
return {
bundleHash: hash,
aggregateBundleHash: aggregateHash,
to: receipt.to,
from: receipt.from,
contractAddress: receipt.contractAddress,
transactionIndex: receipt.transactionIndex,
root: receipt.root,
gasUsed: receipt.gasUsed,
logsBloom: receipt.logsBloom,
blockHash: receipt.blockHash,
transactionHash: receipt.transactionHash,
logs: receipt.logs,
blockNumber: receipt.blockNumber,
confirmations: receipt.confirmations,
cumulativeGasUsed: receipt.cumulativeGasUsed,
effectiveGasPrice: receipt.effectiveGasPrice,
byzantium: receipt.byzantium,
type: receipt.type,
status: receipt.status,
};
}
async hashBundle(bundle: Bundle): Promise<string> {
const operationsWithZeroGas = bundle.operations.map((operation) => {
return {
...operation,
gas: BigNumber.from(0),
};
});
const verifyMethodName = "verify";
const bundleType = VerificationGatewayFactory.abi.find(
(entry) => "name" in entry && entry.name === verifyMethodName,
)?.inputs[0];
const validatedBundle = {
...bundle,
operations: operationsWithZeroGas,
};
const encodedBundleWithZeroSignature = ethers.utils.defaultAbiCoder.encode(
[bundleType as ExplicitAny],
[
{
...validatedBundle,
signature: [BigNumber.from(0), BigNumber.from(0)],
},
],
);
const bundleHash = ethers.utils.keccak256(encodedBundleWithZeroSignature);
const chainId = (await this.ethereumService.provider.getNetwork()).chainId;
const bundleAndChainIdEncoding = ethers.utils.defaultAbiCoder.encode(
["bytes32", "uint256"],
[bundleHash, chainId],
);
return ethers.utils.keccak256(bundleAndChainIdEncoding);
}
async runSubmission() {
this.submissionsInProgress++;
const bundleSubmitted = await this.runQueryGroup(async () => {
const submissionResult = await this.runQueryGroup(async () => {
const currentBlockNumber = await this.ethereumService.BlockNumber();
let eligibleRows = this.bundleTable.findEligible(
let eligibleRows = await this.bundleTable.findEligible(
currentBlockNumber,
this.config.bundleQueryLimit,
);
@@ -296,89 +226,38 @@ export default class BundleService {
(row) => !this.unconfirmedRowIds.has(row.id),
);
this.emit({
type: "running-strategy",
data: {
eligibleRows: eligibleRows.length,
},
});
const {
aggregateBundle,
includedRows,
bundleOverheadCost,
bundleOverheadLen,
expectedFee,
expectedMaxCost,
failedRows,
} = await this
const { aggregateBundle, includedRows, failedRows } = await this
.aggregationStrategy.run(eligibleRows);
this.emit({
type: "completed-strategy",
data: {
includedRows: includedRows.length,
bundleOverheadCost: ethers.utils.formatEther(bundleOverheadCost),
bundleOverheadLen,
expectedFee: ethers.utils.formatEther(expectedFee),
expectedMaxCost: ethers.utils.formatEther(expectedMaxCost),
},
});
if (aggregateBundle) {
const aggregateBundleHash = await this.hashBundle(aggregateBundle);
for (const row of includedRows) {
row.aggregateHash = aggregateBundleHash;
}
}
for (const failedRow of failedRows) {
this.emit({
type: "failed-row",
data: {
publicKeyShorts: failedRow.bundle.senderPublicKeys.map(
toShortPublicKey,
),
submitError: failedRow.submitError,
},
});
this.handleFailedRow(failedRow, currentBlockNumber);
await this.handleFailedRow(failedRow, currentBlockNumber);
}
if (!aggregateBundle || includedRows.length === 0) {
return false;
return;
}
await this.submitAggregateBundle(
aggregateBundle,
includedRows,
expectedFee,
expectedMaxCost,
);
return true;
});
this.submissionsInProgress--;
this.addTask(() => this.tryAggregating());
if (bundleSubmitted) {
this.addTask(() => this.tryAggregating());
}
return submissionResult;
}
handleFailedRow(row: BundleRow, currentBlockNumber: BigNumber) {
async handleFailedRow(row: BundleRow, currentBlockNumber: BigNumber) {
if (row.nextEligibilityDelay.lte(this.config.maxEligibilityDelay)) {
this.bundleTable.update({
await this.bundleTable.update({
...row,
eligibleAfter: currentBlockNumber.add(row.nextEligibilityDelay),
nextEligibilityDelay: row.nextEligibilityDelay.mul(2),
});
} else {
this.bundleTable.update({
...row,
status: "failed",
});
await this.bundleTable.remove(row);
}
this.unconfirmedRowIds.delete(row.id);
@@ -387,10 +266,23 @@ export default class BundleService {
async submitAggregateBundle(
aggregateBundle: Bundle,
includedRows: BundleRow[],
expectedFee: BigNumber,
expectedMaxCost: BigNumber,
) {
const releaseSemaphore = await this.submissionSemaphore.acquire();
const maxUnconfirmedActions = (
this.config.maxUnconfirmedAggregations *
this.config.maxAggregationSize
);
const actionCount = countActions(aggregateBundle);
while (
this.unconfirmedActionCount + actionCount > maxUnconfirmedActions
) {
// FIXME (merge-ok): Polling
this.emit({ type: "waiting-unconfirmed-space" });
await delay(1000);
}
this.unconfirmedActionCount += actionCount;
this.unconfirmedBundles.add(aggregateBundle);
for (const row of includedRows) {
@@ -399,62 +291,36 @@ export default class BundleService {
this.addTask(async () => {
try {
const balanceBefore = await this.ethereumService.wallet.getBalance();
const receipt = await this.ethereumService.submitBundle(
aggregateBundle,
Infinity,
300,
);
const balanceAfter = await this.ethereumService.wallet.getBalance();
for (const row of includedRows) {
this.bundleTable.update({
...row,
this.confirmedBundles.set(row.hash, {
bundle: row.bundle,
receipt,
status: "confirmed",
});
}
const profit = balanceAfter.sub(balanceBefore);
/** What we paid to process the bundle */
let cost = receipt.gasUsed.mul(receipt.effectiveGasPrice);
if (this.config.isOptimism) {
cost = cost.add(
await getOptimismL1Fee(
this.ethereumService.provider,
receipt.transactionHash,
),
);
}
/** Fees collected from users */
const actualFee = profit.add(cost);
this.emit({
type: "submission-confirmed",
data: {
hash: receipt.transactionHash,
bundleHashes: includedRows.map((row) => row.hash),
blockNumber: receipt.blockNumber,
profit: ethers.utils.formatEther(profit),
cost: ethers.utils.formatEther(cost),
expectedMaxCost: ethers.utils.formatEther(expectedMaxCost),
actualFee: ethers.utils.formatEther(actualFee),
expectedFee: ethers.utils.formatEther(expectedFee),
},
});
await this.bundleTable.remove(...includedRows);
} finally {
this.unconfirmedActionCount -= actionCount;
this.unconfirmedBundles.delete(aggregateBundle);
for (const row of includedRows) {
this.unconfirmedRowIds.delete(row.id);
}
releaseSemaphore();
}
});
}
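handleFailedRow above retries a failed row with an exponentially growing eligibility delay, giving up once the delay exceeds maxEligibilityDelay. With a starting delay of 1 block and a maxEligibilityDelay of 8 (illustrative values), the retry schedule works out as 1, 2, 4, 8 blocks and then removal:

import { BigNumber } from "ethers";

// Sketch of the retry schedule only; real values come from env/config.
const maxEligibilityDelay = BigNumber.from(8);

let nextEligibilityDelay = BigNumber.from(1);
let blockNumber = BigNumber.from(1000);

while (nextEligibilityDelay.lte(maxEligibilityDelay)) {
  const eligibleAfter = blockNumber.add(nextEligibilityDelay);
  console.log(`retry after block ${eligibleAfter}, then delay ${nextEligibilityDelay.mul(2)}`);
  blockNumber = eligibleAfter; // assume that retry also fails
  nextEligibilityDelay = nextEligibilityDelay.mul(2); // 1 -> 2 -> 4 -> 8 -> give up
}
// Once the delay exceeds maxEligibilityDelay the row is dropped
// (removed, or marked "failed" in the variant that keeps history).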

View File

@@ -3,308 +3,177 @@ import {
Bundle,
bundleFromDto,
bundleToDto,
Constraint,
CreateTableMode,
DataType,
ethers,
sqlite,
QueryClient,
QueryTable,
TableOptions,
unsketchify,
} from "../../deps.ts";
import assertExists from "../helpers/assertExists.ts";
import ExplicitAny from "../helpers/ExplicitAny.ts";
import { parseBundleDto } from "./parsers.ts";
import nil from "../helpers/nil.ts";
import assert from "../helpers/assert.ts";
/**
* Representation used when talking to the database. It's 'raw' in the sense
* that it only uses primitive types, because the database cannot know about
* custom classes like BigNumber.
*
* Note that this isn't as raw as it used to be - sqlite returns each row as an
* array. This is still the raw representation of each field though.
*/
type RawRow = {
id: number;
status: string;
hash: string;
bundle: string;
eligibleAfter: string;
nextEligibilityDelay: string;
submitError: string | null;
receipt: string | null;
aggregateHash: string | null;
};
const BundleStatuses = ["pending", "confirmed", "failed"] as const;
type BundleStatus = typeof BundleStatuses[number];
type Row = {
id: number;
status: BundleStatus;
hash: string;
bundle: Bundle;
eligibleAfter: BigNumber;
nextEligibilityDelay: BigNumber;
submitError?: string;
receipt?: ethers.ContractReceipt;
aggregateHash?: string;
};
type InsertRow = Omit<Row, "id">;
type InsertRawRow = Omit<RawRow, "id">;
export function makeHash() {
const buf = new Uint8Array(32);
crypto.getRandomValues(buf);
return ethers.utils.hexlify(buf);
}
export type BundleRow = Row;
function fromRawRow(rawRow: RawRow | sqlite.Row): Row {
if (Array.isArray(rawRow)) {
rawRow = {
id: rawRow[0] as number,
status: rawRow[1] as string,
hash: rawRow[2] as string,
bundle: rawRow[3] as string,
eligibleAfter: rawRow[4] as string,
nextEligibilityDelay: rawRow[5] as string,
submitError: rawRow[6] as string | null,
receipt: rawRow[7] as string | null,
aggregateHash: rawRow[8] as string | null,
};
const tableOptions: TableOptions = {
id: { type: DataType.Serial, constraint: Constraint.PrimaryKey },
hash: { type: DataType.VarChar },
bundle: { type: DataType.VarChar },
eligibleAfter: { type: DataType.VarChar },
nextEligibilityDelay: { type: DataType.VarChar },
};
function fromRawRow(rawRow: RawRow): Row {
const parseResult = parseBundleDto(JSON.parse(rawRow.bundle));
if ("failures" in parseResult) {
throw new Error(parseResult.failures.join("\n"));
}
const parseBundleResult = parseBundleDto(
JSON.parse(rawRow.bundle),
);
if ("failures" in parseBundleResult) {
throw new Error(parseBundleResult.failures.join("\n"));
}
const status = rawRow.status;
if (!isValidStatus(status)) {
throw new Error(`Not a valid bundle status: ${status}`);
}
const rawReceipt = rawRow.receipt;
const receipt: ethers.ContractReceipt = rawReceipt
? JSON.parse(rawReceipt)
: nil;
return {
id: rawRow.id,
status,
hash: rawRow.hash,
bundle: bundleFromDto(parseBundleResult.success),
...rawRow,
bundle: bundleFromDto(parseResult.success),
eligibleAfter: BigNumber.from(rawRow.eligibleAfter),
nextEligibilityDelay: BigNumber.from(rawRow.nextEligibilityDelay),
submitError: rawRow.submitError ?? nil,
receipt,
aggregateHash: rawRow.aggregateHash ?? nil,
};
}
function toInsertRawRow(row: InsertRow): InsertRawRow {
return {
...row,
submitError: row.submitError ?? null,
bundle: JSON.stringify(bundleToDto(row.bundle)),
eligibleAfter: toUint256Hex(row.eligibleAfter),
nextEligibilityDelay: toUint256Hex(row.nextEligibilityDelay),
aggregateHash: row.aggregateHash ?? null,
receipt: JSON.stringify(row.receipt),
};
}
function toRawRow(row: Row): RawRow {
return {
id: row.id,
status: row.status,
hash: row.hash,
bundle: JSON.stringify(row.bundle),
...row,
bundle: JSON.stringify(bundleToDto(row.bundle)),
eligibleAfter: toUint256Hex(row.eligibleAfter),
nextEligibilityDelay: toUint256Hex(row.nextEligibilityDelay),
submitError: row.submitError ?? null,
receipt: JSON.stringify(row.receipt),
aggregateHash: row.aggregateHash ?? null,
};
}
export default class BundleTable {
constructor(
public db: sqlite.DB,
public onQuery = (_sql: string, _params?: sqlite.QueryParameterSet) => {},
queryTable: QueryTable<RawRow>;
safeName: string;
private constructor(public queryClient: QueryClient, tableName: string) {
this.queryTable = this.queryClient.table<RawRow>(tableName);
this.safeName = unsketchify(this.queryTable.name);
}
static async create(
queryClient: QueryClient,
tableName: string,
): Promise<BundleTable> {
const table = new BundleTable(queryClient, tableName);
await table.queryTable.create(tableOptions, CreateTableMode.IfNotExists);
return table;
}
static async createFresh(
queryClient: QueryClient,
tableName: string,
) {
this.dbQuery(`
CREATE TABLE IF NOT EXISTS bundles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
status TEXT NOT NULL,
hash TEXT NOT NULL,
bundle TEXT NOT NULL,
eligibleAfter TEXT NOT NULL,
nextEligibilityDelay TEXT NOT NULL,
submitError TEXT,
receipt TEXT,
aggregateHash TEXT
)
`);
}
const table = new BundleTable(queryClient, tableName);
await table.queryTable.drop(true);
await table.queryTable.create(tableOptions, CreateTableMode.IfNotExists);
dbQuery(sql: string, params?: sqlite.QueryParameterSet) {
this.onQuery(sql, params);
return this.db.query(sql, params);
return table;
}
add(...rows: InsertRow[]) {
for (const row of rows) {
const rawRow = toInsertRawRow(row);
this.dbQuery(
`
INSERT INTO bundles (
id,
status,
hash,
bundle,
eligibleAfter,
nextEligibilityDelay,
submitError,
receipt,
aggregateHash
) VALUES (
:id,
:status,
:hash,
:bundle,
:eligibleAfter,
:nextEligibilityDelay,
:submitError,
:receipt,
:aggregateHash
)
`,
{
":status": rawRow.status,
":hash": rawRow.hash,
":bundle": rawRow.bundle,
":eligibleAfter": rawRow.eligibleAfter,
":nextEligibilityDelay": rawRow.nextEligibilityDelay,
":submitError": rawRow.submitError,
":receipt": rawRow.receipt,
":aggregateHash": rawRow.aggregateHash,
},
);
}
async add(...rows: InsertRow[]) {
await this.queryTable.insert(...rows.map(toInsertRawRow));
}
update(row: Row) {
const rawRow = toRawRow(row);
async update(row: Row) {
await this.queryTable.where({ id: row.id }).update(toRawRow(row));
}
this.dbQuery(
async remove(...rows: Row[]) {
await Promise.all(rows.map((row) =>
this.queryTable
.where({ id: assertExists(row.id) })
.delete()
));
}
async findEligible(blockNumber: BigNumber, limit: number) {
const rows: RawRow[] = await this.queryClient.query(
`
UPDATE bundles
SET
status = :status,
hash = :hash,
bundle = :bundle,
eligibleAfter = :eligibleAfter,
nextEligibilityDelay = :nextEligibilityDelay,
submitError = :submitError,
receipt = :receipt,
aggregateHash = :aggregateHash
SELECT * from ${this.safeName}
WHERE
id = :id
"eligibleAfter" <= '${toUint256Hex(blockNumber)}'
ORDER BY "id" ASC
LIMIT ${limit}
`,
{
":id": rawRow.id,
":status": rawRow.status,
":hash": rawRow.hash,
":bundle": rawRow.bundle,
":eligibleAfter": rawRow.eligibleAfter,
":nextEligibilityDelay": rawRow.nextEligibilityDelay,
":submitError": rawRow.submitError,
":receipt": rawRow.receipt,
":aggregateHash": rawRow.aggregateHash,
},
);
}
remove(...rows: Row[]) {
for (const row of rows) {
this.dbQuery(
"DELETE FROM bundles WHERE id = :id",
{ ":id": assertExists(row.id) },
);
}
}
findEligible(blockNumber: BigNumber, limit: number): Row[] {
const rows = this.dbQuery(
`
SELECT * from bundles
WHERE
eligibleAfter <= '${toUint256Hex(blockNumber)}' AND
status = 'pending'
ORDER BY id ASC
LIMIT :limit
`,
{
":limit": limit,
},
);
return rows.map(fromRawRow);
}
findBundle(hash: string): Row | nil {
const rows = this.dbQuery(
"SELECT * from bundles WHERE hash = :hash",
{ ":hash": hash },
async count(): Promise<bigint> {
const result = await this.queryClient.query(
`SELECT COUNT(*) FROM ${this.queryTable.name}`,
);
return rows.map(fromRawRow)[0];
return result[0].count as bigint;
}
findAggregateBundle(aggregateHash: string): Row[] | nil {
const rows = this.dbQuery(
`
SELECT * from bundles
WHERE
aggregateHash = :aggregateHash AND
status = 'confirmed'
ORDER BY id ASC
`,
{ ":aggregateHash": aggregateHash },
);
return rows.map(fromRawRow);
}
count(): number {
const result = this.dbQuery("SELECT COUNT(*) FROM bundles")[0][0];
assert(typeof result === "number");
return result;
}
all(): Row[] {
const rawRows = this.dbQuery(
"SELECT * FROM bundles",
async all(): Promise<Row[]> {
const rawRows: RawRow[] = await this.queryClient.query(
`SELECT * FROM ${this.queryTable.name}`,
);
return rawRows.map(fromRawRow);
}
drop() {
this.dbQuery("DROP TABLE bundles");
async drop() {
await this.queryTable.drop(true);
}
clear() {
this.dbQuery("DELETE from bundles");
async clear() {
return await this.queryClient.query(`
DELETE from ${this.safeName}
`);
}
}
function toUint256Hex(n: BigNumber) {
return `0x${n.toHexString().slice(2).padStart(64, "0")}`;
}
function isValidStatus(status: unknown): status is BundleStatus {
return typeof status === "string" &&
BundleStatuses.includes(status as ExplicitAny);
}
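eligibleAfter and nextEligibilityDelay are stored via toUint256Hex above as fixed-width 64-character hex strings; that is what lets the plain string comparison in findEligible's WHERE clause agree with numeric ordering. A small illustration:

import { BigNumber } from "ethers";

function toUint256Hex(n: BigNumber) {
  return `0x${n.toHexString().slice(2).padStart(64, "0")}`;
}

const a = BigNumber.from(255); // 0xff
const b = BigNumber.from(256); // 0x100

// Unpadded hex strings do NOT compare in numeric order:
console.log("0xff" <= "0x100"); // false, because "f" sorts after "1"

// Fixed-width padding restores numeric ordering under string comparison:
console.log(toUint256Hex(a) <= toUint256Hex(b)); // true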

View File

@@ -1,20 +1,17 @@
import {
AggregatorUtilities,
AggregatorUtilities__factory,
BaseContract,
BigNumber,
BlsRegistrationCompressor,
BlsWalletSigner,
BlsWalletWrapper,
Bundle,
BundleCompressor,
BytesLike,
ContractsConnector,
delay,
Erc20Compressor,
ethers,
FallbackCompressor,
initBlsWalletSigner,
VerificationGateway,
VerificationGateway__factory,
Wallet,
} from "../../deps.ts";
@@ -25,9 +22,6 @@ import AppEvent from "./AppEvent.ts";
import toPublicKeyShort from "./helpers/toPublicKeyShort.ts";
import AsyncReturnType from "../helpers/AsyncReturnType.ts";
import ExplicitAny from "../helpers/ExplicitAny.ts";
import nil from "../helpers/nil.ts";
import hexToUint8Array from "../helpers/hexToUint8Array.ts";
import OptimismGasPriceOracle from "./OptimismGasPriceOracle.ts";
export type TxCheckResult = {
failures: TransactionFailure[];
@@ -48,9 +42,10 @@ type CallHelper<T> = {
resultDecoder: (result: BytesLike) => T;
};
type CallResult<T> =
type CallResult<T> = (
| { success: true; returnValue: T }
| { success: false; returnValue: undefined };
| { success: false; returnValue: undefined }
);
type MapCallHelperReturns<T> = T extends CallHelper<unknown>[]
? (T extends [CallHelper<infer First>, ...infer Rest]
@@ -69,24 +64,28 @@ type DecodeReturnType<
Method extends keyof Contract["callStatic"],
> = EnforceArray<AsyncReturnType<Contract["callStatic"][Method]>>;
type ExpanderEntryPoint = AsyncReturnType<
ContractsConnector["ExpanderEntryPoint"]
>;
export default class EthereumService {
verificationGateway: VerificationGateway;
utilities: AggregatorUtilities;
constructor(
public emit: (evt: AppEvent) => void,
public wallet: Wallet,
public provider: ethers.providers.Provider,
public chainId: number,
public blsWalletWrapper: BlsWalletWrapper,
public blsWalletSigner: BlsWalletSigner,
public verificationGateway: VerificationGateway,
public aggregatorUtilities: AggregatorUtilities,
public expanderEntryPoint: ExpanderEntryPoint,
public bundleCompressor: BundleCompressor,
verificationGatewayAddress: string,
utilitiesAddress: string,
public nextNonce: BigNumber,
) {}
) {
this.verificationGateway = VerificationGateway__factory.connect(
verificationGatewayAddress,
this.wallet,
);
this.utilities = AggregatorUtilities__factory.connect(
utilitiesAddress,
this.wallet,
);
}
NextNonce() {
const result = this.nextNonce;
@@ -96,99 +95,37 @@ export default class EthereumService {
static async create(
emit: (evt: AppEvent) => void,
verificationGatewayAddress: string,
utilitiesAddress: string,
aggPrivateKey: string,
): Promise<EthereumService> {
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
provider.pollingInterval = env.RPC_POLLING_INTERVAL;
const wallet = EthereumService.Wallet(provider, aggPrivateKey);
const contractsConnector = await ContractsConnector.create(wallet);
const [
verificationGateway,
aggregatorUtilities,
blsExpanderDelegator,
erc20Expander,
blsRegistration,
fallbackExpander,
expanderEntryPoint,
] = await Promise.all([
contractsConnector.VerificationGateway(),
contractsConnector.AggregatorUtilities(),
contractsConnector.BLSExpanderDelegator(),
contractsConnector.ERC20Expander(),
contractsConnector.BLSRegistration(),
contractsConnector.FallbackExpander(),
contractsConnector.ExpanderEntryPoint(),
]);
const blsWalletWrapper = await BlsWalletWrapper.connect(
aggPrivateKey,
verificationGateway.address,
provider,
);
const blsNonce = await blsWalletWrapper.Nonce();
if (blsNonce.eq(0)) {
if (!env.AUTO_CREATE_INTERNAL_BLS_WALLET) {
throw new Error([
"Required internal bls wallet does not exist. Either enable",
"AUTO_CREATE_INTERNAL_BLS_WALLET or run",
"./programs/createInternalBlsWallet.ts",
].join(" "));
}
await (await verificationGateway.processBundle(
await blsWalletWrapper.signWithGasEstimate({
nonce: 0,
actions: [],
}),
)).wait();
}
const wallet = EthereumService.Wallet(aggPrivateKey);
const nextNonce = BigNumber.from(await wallet.getTransactionCount());
const chainId = await wallet.getChainId();
const blsWalletSigner = await initBlsWalletSigner({
chainId,
privateKey: aggPrivateKey,
verificationGatewayAddress: verificationGateway.address,
});
const bundleCompressor = new BundleCompressor(blsExpanderDelegator);
const [erc20Compressor, blsRegistrationCompressor, fallbackCompressor] =
await Promise.all([
Erc20Compressor.wrap(erc20Expander),
BlsRegistrationCompressor.wrap(blsRegistration),
FallbackCompressor.wrap(fallbackExpander),
]);
await bundleCompressor.addCompressor(erc20Compressor);
await bundleCompressor.addCompressor(blsRegistrationCompressor);
await bundleCompressor.addCompressor(fallbackCompressor);
const blsWalletSigner = await initBlsWalletSigner({ chainId });
return new EthereumService(
emit,
wallet,
provider,
chainId,
blsWalletWrapper,
blsWalletSigner,
verificationGateway,
aggregatorUtilities,
expanderEntryPoint,
bundleCompressor,
verificationGatewayAddress,
utilitiesAddress,
nextNonce,
);
}
async BlockNumber(): Promise<BigNumber> {
return BigNumber.from(
await this.provider.getBlockNumber(),
await this.wallet.provider.getBlockNumber(),
);
}
async waitForNextBlock() {
await new Promise((resolve) => {
this.wallet.provider.once("block", resolve);
});
}
// TODO (merge-ok): Consider: We may want to fail operations
// that are not at the next expected nonce, including all
// current pending transactions for that wallet.
@@ -251,10 +188,9 @@ export default class EthereumService {
async callStaticSequence<Calls extends CallHelper<unknown>[]>(
...calls: Calls
): Promise<MapCallHelperReturns<Calls>> {
const rawResults = await this.aggregatorUtilities.callStatic
.performSequence(
calls.map((c) => c.value),
);
const rawResults = await this.utilities.callStatic.performSequence(
calls.map((c) => c.value),
);
const results: CallResult<unknown>[] = rawResults.map(
([success, result], i) => {
@@ -275,10 +211,10 @@ export default class EthereumService {
async callStaticSequenceWithMeasure<Measure, CallReturn>(
measureCall: CallHelper<Measure>,
calls: CallHelper<CallReturn>[],
): Promise<{
): (Promise<{
measureResults: CallResult<Measure>[];
callResults: CallResult<CallReturn>[];
}> {
}>) {
const fullCalls: CallHelper<unknown>[] = [measureCall];
for (const call of calls) {
@@ -322,33 +258,19 @@ export default class EthereumService {
assert(bundle.operations.length > 0, "Cannot process empty bundle");
assert(maxAttempts > 0, "Must have at least one attempt");
const compressedBundle = await this.bundleCompressor.compress(bundle);
const [rawTx, rawCompressedTx] = await Promise.all([
this.verificationGateway.populateTransaction.processBundle(bundle).then(
(tx) => this.wallet.signTransaction(tx),
),
this.wallet.signTransaction({
to: this.expanderEntryPoint.address,
data: compressedBundle,
}),
]);
const txLen = ethers.utils.hexDataLength(rawTx);
const compressedTxLen = ethers.utils.hexDataLength(rawCompressedTx);
const txRequest: ethers.providers.TransactionRequest = {
to: this.expanderEntryPoint.address,
data: compressedBundle,
nonce: this.NextNonce(),
...await this.GasConfig(),
};
const processBundleArgs: Parameters<VerificationGateway["processBundle"]> =
[
bundle,
{ nonce: this.NextNonce() },
];
const attempt = async () => {
let response: ethers.providers.TransactionResponse;
let txResponse: ethers.providers.TransactionResponse;
try {
response = await this.wallet.sendTransaction(txRequest);
txResponse = await this.verificationGateway.processBundle(
...processBundleArgs,
);
} catch (error) {
if (/\binvalid transaction nonce\b/.test(error.message)) {
// This can occur when the nonce is in the future, which can
@@ -364,10 +286,7 @@ export default class EthereumService {
}
try {
return {
type: "complete" as const,
value: await response.wait(),
};
return { type: "receipt" as const, value: await txResponse.wait() };
} catch (error) {
return { type: "waitError" as const, value: error };
}
@@ -378,12 +297,12 @@ export default class EthereumService {
for (let i = 0; i < maxAttempts; i++) {
this.emit({
type: "submission-attempt",
data: { attemptNumber: i + 1, publicKeyShorts, txLen, compressedTxLen },
data: { attemptNumber: i + 1, publicKeyShorts },
});
const attemptResult = await attempt();
if (attemptResult.type === "complete") {
if (attemptResult.type === "receipt") {
return attemptResult.value;
}
@@ -412,134 +331,8 @@ export default class EthereumService {
throw new Error("Expected return or throw from attempt loop");
}
/**
* Estimates the amount of effective gas needed to process the bundle using
* compression.
*
* Here 'effective' gas means the number you need to multiply by gasPrice in
* order to get the right fee. There are a few cases here:
*
* 1. L1 chains (used in testing, eg gethDev)
* - Effective gas is equal to regular gas
* 2. Arbitrum
* - The Arbitrum node already responds with effective gas when calling
* estimateGas
* 3. Optimism
* - We estimate Optimism's calculation for the amount of L1 gas it will
* charge for, and then convert that into an equivalent amount of L2 gas.
*/
async estimateEffectiveCompressedGas(bundle: Bundle): Promise<BigNumber> {
const compressedBundle = await this.bundleCompressor.compress(bundle);
let gasEstimate = await this.wallet.estimateGas({
to: this.expanderEntryPoint.address,
data: compressedBundle,
});
if (env.IS_OPTIMISM) {
const extraGasEstimate = await this.estimateOptimismL2GasNeededForL1Gas(
compressedBundle,
gasEstimate,
);
gasEstimate = gasEstimate.add(extraGasEstimate);
}
return gasEstimate;
}
async GasConfig(block?: ethers.providers.Block) {
block ??= await this.provider.getBlock("latest");
const previousBaseFee = block.baseFeePerGas;
assert(previousBaseFee !== null && previousBaseFee !== nil);
// Increase the basefee we're willing to pay to improve the chance of our
// transaction getting included. As per EIP-1559, we only pay the actual
// basefee anyway, *but* we also pass this fee on to users, who don't have
// this benefit (they'll pay regardless of where basefee lands).
//
// This means there's a tradeoff here - low values risk our transactions not
// being included, high values pass on unnecessary fees to users.
//
const baseFeeIncrease = previousBaseFee.mul(
env.PREVIOUS_BASE_FEE_PERCENT_INCREASE,
).div(100);
return {
maxFeePerGas: previousBaseFee
.add(baseFeeIncrease)
// Remember that basefee is burned, not provided to miners. Miners
// *only* get the priority fee, so they have no reason to care about our
// transaction if the priority fee is zero.
.add(env.PRIORITY_FEE_PER_GAS),
maxPriorityFeePerGas: env.PRIORITY_FEE_PER_GAS,
};
}
/**
* Estimates the L1 gas that Optimism will charge us for and expresses it as
* an amount of equivalent L2 gas.
*
* This is very similar to what Arbitrum does, but in Arbitrum it's built-in,
* and you actually sign for that additional L2 gas. On Optimism, you only
* sign for the actual L2 gas, and optimism just adds the L1 fee.
*
* For our purposes, this works as a way to normalize the behavior between
* the different chains.
*/
async estimateOptimismL2GasNeededForL1Gas(
compressedBundle: string,
gasLimit: BigNumber,
): Promise<BigNumber> {
const block = await this.provider.getBlock("latest");
const gasConfig = await this.GasConfig(block);
const txBytes = await this.wallet.signTransaction({
type: 2,
chainId: this.chainId,
nonce: this.nextNonce,
to: this.expanderEntryPoint.address,
data: compressedBundle,
...gasConfig,
gasLimit,
});
let l1Gas = 0;
for (const byte of hexToUint8Array(txBytes)) {
if (byte === 0) {
l1Gas += 4;
} else {
l1Gas += 16;
}
}
const gasOracle = new OptimismGasPriceOracle(this.provider);
const { l1BaseFee, overhead, scalar, decimals } = await gasOracle
.getAllParams();
const scalarNum = scalar.toNumber() / (10 ** decimals.toNumber());
l1Gas += overhead.toNumber();
assert(block.baseFeePerGas !== null && block.baseFeePerGas !== nil);
assert(env.OPTIMISM_L1_BASE_FEE_PERCENT_INCREASE !== nil);
const adjustedL1BaseFee = l1BaseFee.toNumber() * scalarNum *
(1 + env.OPTIMISM_L1_BASE_FEE_PERCENT_INCREASE / 100);
const feeRatio = adjustedL1BaseFee / block.baseFeePerGas.toNumber();
return BigNumber.from(
Math.ceil(feeRatio * l1Gas),
);
}
private static Wallet(
provider: ethers.providers.Provider,
privateKey: string,
) {
private static Wallet(privateKey: string) {
const provider = new ethers.providers.JsonRpcProvider(env.RPC_URL);
const wallet = new Wallet(privateKey, provider);
if (env.USE_TEST_NET) {

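GasConfig above bumps the previous block's basefee by PREVIOUS_BASE_FEE_PERCENT_INCREASE and adds the priority fee on top of the raised ceiling. With illustrative numbers (100 gwei basefee, a 20% increase, a 0.5 gwei tip), the arithmetic is:

import { BigNumber } from "ethers";

// Illustrative values only; the real ones come from the latest block and env.
const previousBaseFee = BigNumber.from(100_000_000_000); // 100 gwei
const percentIncrease = 20; // stands in for env.PREVIOUS_BASE_FEE_PERCENT_INCREASE
const priorityFeePerGas = BigNumber.from(500_000_000); // 0.5 gwei

const baseFeeIncrease = previousBaseFee.mul(percentIncrease).div(100); // 20 gwei

const maxFeePerGas = previousBaseFee
  .add(baseFeeIncrease) // willing to pay up to 120 gwei of basefee
  .add(priorityFeePerGas); // plus the tip: 120.5 gwei total

const maxPriorityFeePerGas = priorityFeePerGas;

console.log(maxFeePerGas.toString(), maxPriorityFeePerGas.toString());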
View File

@@ -1,16 +0,0 @@
import { Router } from "../../deps.ts";
import HealthService from "./HealthService.ts";
export default function HealthRouter(healthService: HealthService) {
const router = new Router({ prefix: "/" });
router.get(
"health",
async (ctx) => {
const healthResults = await healthService.getHealth();
console.log(`Status: ${healthResults.status}\n`);
ctx.response.status = healthResults.status == 'healthy' ? 200 : 503;
ctx.response.body = { status: healthResults.status };
});
return router;
}

View File

@@ -1,11 +0,0 @@
export type ResourceHealth = 'healthy' | 'unhealthy';
type HealthCheckResult = {
status: ResourceHealth,
};
export default class HealthService {
getHealth(): Promise<HealthCheckResult> {
return Promise.resolve({ status: 'healthy' });
}
}

View File

@@ -1,52 +0,0 @@
import { BigNumber, ethers } from "../../deps.ts";
import assert from "../helpers/assert.ts";
import { OPTIMISM_GAS_PRICE_ORACLE_ADDRESS } from "../env.ts";
export default class OptimismGasPriceOracle {
constructor(
public provider: ethers.providers.Provider,
) {}
private async callFn(method: string, blockTag?: ethers.providers.BlockTag) {
const outputBytes = await this.provider.call({
to: OPTIMISM_GAS_PRICE_ORACLE_ADDRESS,
data: ethers.utils.id(method),
}, blockTag);
const result = ethers.utils.defaultAbiCoder.decode(
["uint256"],
outputBytes,
)[0];
assert(result instanceof BigNumber);
return result;
}
async l1BaseFee(blockTag?: ethers.providers.BlockTag) {
return await this.callFn("l1BaseFee()", blockTag);
}
async overhead(blockTag?: ethers.providers.BlockTag) {
return await this.callFn("overhead()", blockTag);
}
async scalar(blockTag?: ethers.providers.BlockTag) {
return await this.callFn("scalar()", blockTag);
}
async decimals(blockTag?: ethers.providers.BlockTag) {
return await this.callFn("decimals()", blockTag);
}
async getAllParams(blockTag?: ethers.providers.BlockTag) {
const [l1BaseFee, overhead, scalar, decimals] = await Promise.all([
this.l1BaseFee(blockTag),
this.overhead(blockTag),
this.scalar(blockTag),
this.decimals(blockTag),
]);
return { l1BaseFee, overhead, scalar, decimals };
}
}

View File

@@ -1,4 +1,4 @@
import { Application, oakCors, sqlite } from "../../deps.ts";
import { Application, oakCors } from "../../deps.ts";
import * as env from "../env.ts";
import EthereumService from "./EthereumService.ts";
@@ -8,49 +8,43 @@ import AdminRouter from "./AdminRouter.ts";
import AdminService from "./AdminService.ts";
import errorHandler from "./errorHandler.ts";
import notFoundHandler from "./notFoundHandler.ts";
import createQueryClient from "./createQueryClient.ts";
import Mutex from "../helpers/Mutex.ts";
import Clock from "../helpers/Clock.ts";
import getNetworkConfig from "../helpers/getNetworkConfig.ts";
import AppEvent from "./AppEvent.ts";
import BundleTable from "./BundleTable.ts";
import AggregationStrategy from "./AggregationStrategy.ts";
import AggregationStrategyRouter from "./AggregationStrategyRouter.ts";
import HealthService from "./HealthService.ts";
import HealthRouter from "./HealthRouter.ts";
export default async function app(emit: (evt: AppEvent) => void) {
emit({ type: "starting" });
const { addresses } = await getNetworkConfig();
const clock = Clock.create();
const queryClient = createQueryClient(emit);
const bundleTableMutex = new Mutex();
const bundleTable = new BundleTable(
new sqlite.DB(env.DB_PATH),
(sql, params) => {
if (env.LOG_QUERIES) {
emit({
type: "db-query",
data: { sql, params },
});
}
},
const bundleTable = await BundleTable.create(
queryClient,
env.BUNDLE_TABLE_NAME,
);
const ethereumService = await EthereumService.create(
emit,
addresses.verificationGateway,
addresses.utilities,
env.PRIVATE_KEY_AGG,
);
const aggregationStrategy = new AggregationStrategy(
ethereumService.blsWalletSigner,
ethereumService,
AggregationStrategy.defaultConfig,
emit,
);
const bundleService = new BundleService(
emit,
clock,
queryClient,
bundleTableMutex,
bundleTable,
ethereumService.blsWalletSigner,
@@ -63,13 +57,10 @@ export default async function app(emit: (evt: AppEvent) => void) {
bundleTable,
);
const healthService = new HealthService();
const routers = [
BundleRouter(bundleService),
AdminRouter(adminService),
AggregationStrategyRouter(aggregationStrategy),
HealthRouter(healthService),
];
const app = new Application();

View File

@@ -0,0 +1,39 @@
import { QueryClient } from "../../deps.ts";
import * as env from "../env.ts";
import AppEvent from "./AppEvent.ts";
export default function createQueryClient(
emit: (evt: AppEvent) => void,
/**
* Sadly, there appears to be a singleton inside QueryClient, which forces us
* to re-use it during testing.
*/
existingClient?: QueryClient,
): QueryClient {
const client = existingClient ?? new QueryClient({
hostname: env.PG.HOST,
port: env.PG.PORT,
user: env.PG.USER,
password: env.PG.PASSWORD,
database: env.PG.DB_NAME,
tls: {
enforce: false,
},
});
if (env.LOG_QUERIES) {
const originalQuery = client.query.bind(client);
client.query = async (sql, params) => {
emit({
type: "db-query",
data: { sql, params: params ?? [] },
});
return await originalQuery(sql, params);
};
}
return client;
}
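A small usage sketch of the logging wrapper above; the emit callback and the SQL statement are illustrative only:

const client = createQueryClient((evt) => console.log(evt));

// With LOG_QUERIES=true, each call first emits a db-query AppEvent, then delegates
// to the original query method:
await client.query("SELECT 1", []);
// -> logs { type: "db-query", data: { sql: "SELECT 1", params: [] } }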

View File

@@ -0,0 +1,6 @@
import { Bundle } from "../../../deps.ts";
import plus from "./plus.ts";
export default function countActions(bundle: Bundle) {
return bundle.operations.map((op) => op.actions.length).reduce(plus, 0);
}
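countActions simply sums the action counts across all operations; a tiny example with a hypothetical bundle:

// A bundle whose two operations contain 1 and 3 actions respectively:
countActions(bundle); // -> 4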

View File

@@ -1,3 +0,0 @@
export default function never(value: never): never {
throw new Error(`Unexpected value: ${value}`);
}

View File

@@ -1,8 +1,9 @@
import { BundleDto } from "../../deps.ts";
type ParseResult<T> =
type ParseResult<T> = (
| { success: T }
| { failures: string[] };
| { failures: string[] }
);
type Parser<T> = (value: unknown) => ParseResult<T>;
@@ -95,12 +96,14 @@ export function parseArray<T>(
};
}
type DataTuple<ParserTuple> = ParserTuple extends Parser<unknown>[] ? (
type DataTuple<ParserTuple> = (
ParserTuple extends Parser<unknown>[] ? (
ParserTuple extends [Parser<infer T>, ...infer Tail]
? [T, ...DataTuple<Tail>]
: []
)
: never;
: never
);
export function parseTuple<ParserTuple extends Parser<unknown>[]>(
...parserTuple: ParserTuple
@@ -185,7 +188,6 @@ const parseActionDataDto: Parser<ActionDataDto> = parseObject({
const parseOperationDto: Parser<OperationDto> = parseObject({
nonce: parseHex(),
gas: parseHex(),
actions: parseArray(parseActionDataDto),
});
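A short sketch of how the ParseResult union above is consumed; the input value and failure message are illustrative:

const result = parseHex()("0x01");

if ("failures" in result) {
  console.error(result.failures); // e.g. ["not a hex string"]
} else {
  console.log(result.success);    // the parsed hex value
}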

View File

@@ -1,17 +1,18 @@
import { QueryClient } from "../../deps.ts";
import Mutex from "../helpers/Mutex.ts";
import AppEvent from "./AppEvent.ts";
export default async function runQueryGroup<T>(
emit: (evt: AppEvent) => void,
query: (sql: string) => void,
mutex: Mutex,
queryClient: QueryClient,
body: () => Promise<T>,
) {
const lock = await mutex.Lock();
let completed = false;
try {
query("BEGIN");
queryClient.query("BEGIN");
const result = await body();
completed = true;
return result;
@@ -24,6 +25,6 @@ export default async function runQueryGroup<T>(
throw error;
} finally {
lock.release();
query(completed ? "COMMIT" : "ROLLBACK");
await queryClient.query(completed ? "COMMIT" : "ROLLBACK");
}
}
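A hedged usage sketch of the transaction wrapper above, with the parameter order taken from the updated signature and an illustrative table write inside the body:

const added = await runQueryGroup(emit, bundleTableMutex, queryClient, async () => {
  await bundleTable.add(row); // runs between BEGIN and COMMIT
  return true;
});
// If the body throws, the finally block issues ROLLBACK instead of COMMIT,
// and the mutex keeps query groups from interleaving.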

View File

@@ -1,51 +1,42 @@
import assert from "./helpers/assert.ts";
import {
optionalEnv,
optionalNumberEnv,
requireBigNumberEnv,
requireBoolEnv,
requireEnv,
requireIntEnv,
requireNumberEnv,
} from "./helpers/envTools.ts";
import nil from "./helpers/nil.ts";
export const RPC_URL = requireEnv("RPC_URL");
export const RPC_POLLING_INTERVAL = requireIntEnv("RPC_POLLING_INTERVAL");
export const ORIGIN = requireEnv("ORIGIN");
export const PORT = requireIntEnv("PORT");
export const USE_TEST_NET = requireBoolEnv("USE_TEST_NET");
export const NETWORK_CONFIG_PATH = Deno.env.get("IS_DOCKER") === "true"
? "/app/networkConfig.json"
: requireEnv("NETWORK_CONFIG_PATH");
export const NETWORK_CONFIG_PATH = requireEnv("NETWORK_CONFIG_PATH");
export const PRIVATE_KEY_AGG = requireEnv("PRIVATE_KEY_AGG");
export const PRIVATE_KEY_ADMIN = requireEnv("PRIVATE_KEY_ADMIN");
export const DB_PATH = requireEnv("DB_PATH");
export const PG = {
HOST: requireEnv("PG_HOST"),
PORT: requireEnv("PG_PORT"),
USER: requireEnv("PG_USER"),
PASSWORD: requireEnv("PG_PASSWORD"),
DB_NAME: requireEnv("PG_DB_NAME"),
};
export const BUNDLE_TABLE_NAME = requireEnv("BUNDLE_TABLE_NAME");
/**
* Query limit used when processing potentially large numbers of bundles.
* (Using batching if needed.)
*/
export const BUNDLE_QUERY_LIMIT = requireIntEnv("BUNDLE_QUERY_LIMIT");
/**
* Maximum retry delay in blocks before a failed bundle is discarded.
*/
export const MAX_ELIGIBILITY_DELAY = requireIntEnv("MAX_ELIGIBILITY_DELAY");
/**
* Approximate maximum gas of aggregate bundles.
*
* It's approximate because we use the sum of the marginal gas estimates and add
* the bundle overhead, which is not exactly the same as the gas used when
* putting the bundle together.
*/
export const MAX_GAS_PER_BUNDLE = requireIntEnv("MAX_GAS_PER_BUNDLE");
export const MAX_AGGREGATION_SIZE = requireIntEnv("MAX_AGGREGATION_SIZE");
export const MAX_AGGREGATION_DELAY_MILLIS = requireIntEnv(
"MAX_AGGREGATION_DELAY_MILLIS",
@@ -57,78 +48,10 @@ export const MAX_UNCONFIRMED_AGGREGATIONS = requireIntEnv(
export const LOG_QUERIES = requireBoolEnv("LOG_QUERIES");
export const REQUIRE_FEES = requireBoolEnv("REQUIRE_FEES");
export const BREAKEVEN_OPERATION_COUNT = requireNumberEnv(
"BREAKEVEN_OPERATION_COUNT",
);
export const ALLOW_LOSSES = requireBoolEnv("ALLOW_LOSSES");
export const FEE_TYPE = requireEnv("FEE_TYPE");
export const FEE_PER_GAS = requireBigNumberEnv("FEE_PER_GAS");
export const FEE_PER_BYTE = requireBigNumberEnv("FEE_PER_BYTE");
if (!/^(ether|token:0x[0-9a-fA-F]*)$/.test(FEE_TYPE)) {
throw new Error(`FEE_TYPE has invalid format: "${FEE_TYPE}"`);
}
export const ETH_VALUE_IN_TOKENS = optionalNumberEnv("ETH_VALUE_IN_TOKENS");
if (FEE_TYPE.startsWith("token:") && ETH_VALUE_IN_TOKENS === nil) {
throw new Error([
"Missing ETH_VALUE_IN_TOKENS, which is required because FEE_TYPE is a",
"token",
].join(" "));
}
export const AUTO_CREATE_INTERNAL_BLS_WALLET = requireBoolEnv(
"AUTO_CREATE_INTERNAL_BLS_WALLET",
);
export const PRIORITY_FEE_PER_GAS = requireBigNumberEnv("PRIORITY_FEE_PER_GAS");
/**
* Used to determine the expected basefee when submitting bundles. Note that
* this gets passed on to users.
*/
export const PREVIOUS_BASE_FEE_PERCENT_INCREASE = requireNumberEnv(
"PREVIOUS_BASE_FEE_PERCENT_INCREASE",
);
export const BUNDLE_CHECKING_CONCURRENCY = requireIntEnv(
"BUNDLE_CHECKING_CONCURRENCY",
);
/**
* Optimism's strategy for charging for L1 fees requires special logic in the
* aggregator. In addition to gasEstimate * gasPrice, we need to replicate
* Optimism's calculation and pass it on to the user.
*/
export const IS_OPTIMISM = requireBoolEnv("IS_OPTIMISM");
/**
* Address for the Optimism gas price oracle contract. Required when
* IS_OPTIMISM is true.
*/
export const OPTIMISM_GAS_PRICE_ORACLE_ADDRESS = optionalEnv(
"OPTIMISM_GAS_PRICE_ORACLE_ADDRESS",
);
/**
* Similar to PREVIOUS_BASE_FEE_PERCENT_INCREASE, but for the L1 basefee for
* the optimism-specific calculation. This gets passed on to users.
* Required when IS_OPTIMISM is true.
*/
export const OPTIMISM_L1_BASE_FEE_PERCENT_INCREASE = optionalNumberEnv(
"OPTIMISM_L1_BASE_FEE_PERCENT_INCREASE",
);
if (IS_OPTIMISM) {
assert(
OPTIMISM_L1_BASE_FEE_PERCENT_INCREASE !== nil,
"OPTIMISM_L1_BASE_FEE_PERCENT_INCREASE is required when IS_OPTIMISM is true",
);
assert(
OPTIMISM_GAS_PRICE_ORACLE_ADDRESS !== nil,
"OPTIMISM_GAS_PRICE_ORACLE_ADDRESS is required when IS_OPTIMISM is true",
);
}
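For reference, values that pass the FEE_TYPE format check above (the token address is illustrative only):

const feeTypePattern = /^(ether|token:0x[0-9a-fA-F]*)$/;

feeTypePattern.test("ether");                                            // true
feeTypePattern.test("token:0x1111111111111111111111111111111111111111"); // true
feeTypePattern.test("dai");                                              // false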

View File

@@ -62,19 +62,3 @@ export function requireNumberEnv(envName: string): number {
return value;
}
export function optionalNumberEnv(envName: string): number | nil {
const strValue = optionalEnv(envName);
if (strValue === nil) {
return nil;
}
const value = Number(strValue);
if (!Number.isFinite(value)) {
throw new Error(`Failed to parse ${envName} as number: ${strValue}`);
}
return value;
}

View File

@@ -1,50 +0,0 @@
import { BigNumber, ethers } from "../../deps.ts";
import OptimismGasPriceOracle from "../app/OptimismGasPriceOracle.ts";
import assert from "./assert.ts";
import getRawTransaction from "./getRawTransaction.ts";
import hexToUint8Array from "./hexToUint8Array.ts";
import nil from "./nil.ts";
export default async function getOptimismL1Fee(
provider: ethers.providers.Provider,
txResponseOrHash: string | ethers.providers.TransactionResponse,
) {
const tx = typeof txResponseOrHash === "string"
? await provider.getTransaction(txResponseOrHash)
: txResponseOrHash;
const rawTx = await getRawTransaction(provider, tx);
let l1Gas = 0;
for (const byte of hexToUint8Array(rawTx)) {
if (byte === 0) {
l1Gas += 4;
} else {
l1Gas += 16;
}
}
const gasOracle = new OptimismGasPriceOracle(provider);
assert(tx.blockNumber !== nil);
const {
l1BaseFee,
overhead,
scalar,
decimals,
} = await gasOracle.getAllParams(tx.blockNumber);
l1Gas += overhead.toNumber();
const l1Fee = BigNumber
.from(l1Gas)
.mul(l1BaseFee)
.mul(scalar)
.div(
BigNumber.from(10).pow(decimals),
);
return l1Fee;
}
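A worked example of the calculation above, using made-up oracle values purely for illustration:

import { BigNumber } from "../../deps.ts";

const calldataGas = 1_800;                     // 4 gas per zero byte, 16 per non-zero byte
const overhead = 2_100;                        // fixed overhead reported by the oracle
const l1BaseFee = BigNumber.from(30).mul(1e9); // 30 gwei
const scalar = BigNumber.from(1_000_000);      // 1.0, expressed with 6 decimals
const decimals = 6;

const l1Fee = BigNumber.from(calldataGas + overhead)
  .mul(l1BaseFee)
  .mul(scalar)
  .div(BigNumber.from(10).pow(decimals));
// (1,800 + 2,100) * 30 gwei * 1.0 = 117,000 gwei ≈ 0.000117 ETH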

View File

@@ -1,49 +0,0 @@
import { ethers, pick } from "../../deps.ts";
import assert from "./assert.ts";
import nil from "./nil.ts";
export default async function getRawTransaction(
provider: ethers.providers.Provider,
txResponseOrHash: string | ethers.providers.TransactionResponse,
) {
const tx = typeof txResponseOrHash === "string"
? await provider.getTransaction(txResponseOrHash)
: txResponseOrHash;
const txHash = typeof txResponseOrHash === "string"
? txResponseOrHash
: tx.hash;
assert(typeof txHash === "string");
const { v, r, s } = tx;
assert(r !== nil);
const txBytes = ethers.utils.serializeTransaction(
pick(
tx,
"to",
"nonce",
"gasLimit",
...(tx.type === 2 ? [] : ["gasPrice"] as const),
"data",
"value",
"chainId",
"type",
...(tx.type !== 2 ? [] : [
"accessList",
"maxPriorityFeePerGas",
"maxFeePerGas",
] as const),
),
{ v, r, s },
);
const reconstructedHash = ethers.utils.keccak256(txBytes);
if (reconstructedHash !== txHash) {
throw new Error("Reconstructed hash did not match original hash");
}
return txBytes;
}

View File

@@ -1,16 +0,0 @@
import assert from "./assert.ts";
export default function hexToUint8Array(hex: string) {
assert(hex.startsWith("0x"));
assert(hex.length % 2 === 0);
const len = (hex.length - 2) / 2;
const result = new Uint8Array(len);
for (let i = 0; i < len; i++) {
const hexPos = 2 * i + 2;
result[i] = parseInt(hex.slice(hexPos, hexPos + 2), 16);
}
return result;
}
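For example:

hexToUint8Array("0x00ff10"); // -> Uint8Array [0, 255, 16]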

View File

@@ -1,15 +1,13 @@
import AggregationStrategy from "../src/app/AggregationStrategy.ts";
import { BundleRow } from "../src/app/BundleTable.ts";
import assert from "../src/helpers/assert.ts";
import nil from "../src/helpers/nil.ts";
import { assertEquals, BigNumber, ethers } from "./deps.ts";
import { assertEquals, BigNumber } from "./deps.ts";
import Fixture from "./helpers/Fixture.ts";
Fixture.test("nonzero fee estimate from default test config", async (fx) => {
Fixture.test("zero fee estimate from default test config", async (fx) => {
const [wallet] = await fx.setupWallets(1);
const bundle = await wallet.signWithGasEstimate({
const bundle = wallet.sign({
nonce: await wallet.Nonce(),
actions: [
{
@@ -25,9 +23,11 @@ Fixture.test("nonzero fee estimate from default test config", async (fx) => {
const feeEstimation = await fx.aggregationStrategy.estimateFee(bundle);
assertEquals(feeEstimation.feeDetected, BigNumber.from(0));
assert(feeEstimation.feeRequired.gt(0));
assertEquals(feeEstimation.successes, [true]);
assertEquals(feeEstimation, {
feeDetected: BigNumber.from(0),
feeRequired: BigNumber.from(0),
successes: [true],
});
});
Fixture.test("includes bundle in aggregation when estimated fee is provided", async (fx) => {
@@ -37,21 +37,18 @@ Fixture.test("includes bundle in aggregation when estimated fee is provided", as
fx.blsWalletSigner,
fx.ethereumService,
{
maxGasPerBundle: 1500000,
maxAggregationSize: 12,
fees: {
type: "token",
address: fx.testErc20.address,
allowLosses: true,
breakevenOperationCount: 4.5,
ethValueInTokens: 1300,
type: `token:${fx.testErc20.address}`,
perGas: BigNumber.from(1000000000),
perByte: BigNumber.from(10000000000000),
},
bundleCheckingConcurrency: 8,
},
);
const nonce = await wallet.Nonce();
let bundle = await wallet.signWithGasEstimate({
let bundle = wallet.sign({
nonce,
actions: [
{
@@ -73,7 +70,7 @@ Fixture.test("includes bundle in aggregation when estimated fee is provided", as
const feeEstimation = await aggregationStrategy.estimateFee(bundle);
const safetyDivisor = 5;
const safetyDivisor = 100;
const safetyPremium = feeEstimation.feeRequired.div(safetyDivisor);
// Due to small fluctuations in gas estimation, we add a little safety premium
@@ -84,7 +81,7 @@ Fixture.test("includes bundle in aggregation when estimated fee is provided", as
assertEquals(feeEstimation.feeDetected, BigNumber.from(1));
// Redefine bundle using the estimated fee
bundle = await wallet.signWithGasEstimate({
bundle = wallet.sign({
nonce,
actions: [
{
@@ -100,7 +97,6 @@ Fixture.test("includes bundle in aggregation when estimated fee is provided", as
const bundleRow: BundleRow = {
id: 0,
status: "pending",
hash: "0x0",
bundle,
eligibleAfter: BigNumber.from(0),
@@ -109,69 +105,9 @@ Fixture.test("includes bundle in aggregation when estimated fee is provided", as
const aggregationResult = await aggregationStrategy.run([bundleRow]);
assertEquals(aggregationResult.aggregateBundle, bundle);
assertEquals(aggregationResult.includedRows, [bundleRow]);
assertEquals(aggregationResult.failedRows, []);
});
Fixture.test("includes submitError on failed row when bundle callStaticSequence fails", async (fx) => {
const [wallet] = await fx.setupWallets(1);
const aggregationStrategy = new AggregationStrategy(
fx.blsWalletSigner,
fx.ethereumService,
{
maxGasPerBundle: 1500000,
fees: {
type: "token",
address: fx.testErc20.address,
allowLosses: true,
breakevenOperationCount: 4.5,
ethValueInTokens: 1300,
},
bundleCheckingConcurrency: 8,
},
);
const nonce = await wallet.Nonce();
const bundle = await wallet.signWithGasEstimate({
nonce,
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"transferFrom",
[
"0x0000000000000000000000000000000000000000",
wallet.address,
ethers.BigNumber.from(
"0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
),
],
),
},
],
assertEquals(aggregationResult, {
aggregateBundle: bundle,
includedRows: [bundleRow],
failedRows: [],
});
const bundleRow: BundleRow = {
id: 0,
status: "pending",
hash: "0x0",
bundle,
eligibleAfter: BigNumber.from(0),
nextEligibilityDelay: BigNumber.from(1),
};
const aggregationResult = await aggregationStrategy.run([bundleRow]);
const expectedFailedRow = {
...bundleRow,
submitError: "ERC20: insufficient allowance",
};
assertEquals(aggregationResult.aggregateBundle, nil);
assertEquals(aggregationResult.includedRows, []);
assertEquals(aggregationResult.failedRows, [expectedFailedRow]);
});

View File

@@ -1,12 +1,12 @@
import { BigNumber, Operation, VerificationGatewayFactory, assertBundleSucceeds, assertEquals, ethers } from "./deps.ts";
import ExplicitAny from "../src/helpers/ExplicitAny.ts";
import { assertEquals, assertBundleSucceeds, Operation } from "./deps.ts";
import Fixture from "./helpers/Fixture.ts";
Fixture.test("adds valid bundle", async (fx) => {
const bundleService = fx.createBundleService();
const bundleService = await fx.createBundleService();
const [wallet] = await fx.setupWallets(1);
const tx = await wallet.signWithGasEstimate({
const tx = wallet.sign({
nonce: await wallet.Nonce(),
actions: [
{
@@ -20,20 +20,19 @@ Fixture.test("adds valid bundle", async (fx) => {
],
});
assertEquals(bundleService.bundleTable.count(), 0);
assertEquals(await bundleService.bundleTable.count(), 0n);
assertBundleSucceeds(await bundleService.add(tx));
assertEquals(await bundleService.bundleTable.count(), 1);
assertEquals(await bundleService.bundleTable.count(), 1n);
});
Fixture.test("rejects bundle with invalid signature", async (fx) => {
const bundleService = fx.createBundleService();
const bundleService = await fx.createBundleService();
const [wallet, otherWallet] = await fx.setupWallets(2);
const operation: Operation = {
nonce: await wallet.Nonce(),
gas: 0,
actions: [
{
ethValue: 0,
@@ -54,7 +53,7 @@ Fixture.test("rejects bundle with invalid signature", async (fx) => {
// sig test)
tx.signature = otherTx.signature;
assertEquals(bundleService.bundleTable.count(), 0);
assertEquals(await bundleService.bundleTable.count(), 0n);
const res = await bundleService.add(tx);
if ("hash" in res) {
@@ -63,56 +62,15 @@ Fixture.test("rejects bundle with invalid signature", async (fx) => {
assertEquals(res.failures.map((f) => f.type), ["invalid-signature"]);
// Bundle table remains empty
assertEquals(bundleService.bundleTable.count(), 0);
});
Fixture.test("rejects bundle with valid signature but invalid public key", async (fx) => {
const bundleService = fx.createBundleService();
const [wallet, otherWallet] = await fx.setupWallets(2);
const operation: Operation = {
nonce: await wallet.Nonce(),
gas: 0,
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, "3"],
),
},
],
};
const tx = wallet.sign(operation);
const otherTx = otherWallet.sign(operation);
// Make the signature invalid
// Note: Bug in bls prevents just corrupting the signature (see other invalid
// sig test)
tx.senderPublicKeys[0] = otherTx.senderPublicKeys[0];
assertEquals(bundleService.bundleTable.count(), 0);
const res = await bundleService.add(tx);
if ("hash" in res) {
throw new Error("expected bundle to fail");
}
assertEquals(res.failures.map((f) => f.type), ["invalid-signature"]);
assertEquals(res.failures.map((f) => f.description), [`invalid bundle signature for signature ${tx.signature}`]);
// Bundle table remains empty
assertEquals(bundleService.bundleTable.count(), 0);
assertEquals(await bundleService.bundleTable.count(), 0n);
});
Fixture.test("rejects bundle with nonce from the past", async (fx) => {
const bundleService = fx.createBundleService();
const bundleService = await fx.createBundleService();
const [wallet] = await fx.setupWallets(1);
const tx = wallet.sign({
nonce: (await wallet.Nonce()).sub(1),
gas: 1_000_000,
actions: [
{
ethValue: 0,
@@ -125,7 +83,7 @@ Fixture.test("rejects bundle with nonce from the past", async (fx) => {
],
});
assertEquals(bundleService.bundleTable.count(), 0);
assertEquals(await bundleService.bundleTable.count(), 0n);
const res = await bundleService.add(tx);
if ("hash" in res) {
@@ -134,18 +92,17 @@ Fixture.test("rejects bundle with nonce from the past", async (fx) => {
assertEquals(res.failures.map((f) => f.type), ["duplicate-nonce"]);
// Bundle table remains empty
assertEquals(bundleService.bundleTable.count(), 0);
assertEquals(await bundleService.bundleTable.count(), 0n);
});
Fixture.test(
"rejects bundle with invalid signature and nonce from the past",
async (fx) => {
const bundleService = fx.createBundleService();
const bundleService = await fx.createBundleService();
const [wallet, otherWallet] = await fx.setupWallets(2);
const operation: Operation = {
nonce: (await wallet.Nonce()).sub(1),
gas: 0,
actions: [
{
ethValue: 0,
@@ -168,7 +125,7 @@ Fixture.test(
// https://github.com/thehubbleproject/hubble-bls/pull/20
tx.signature = otherTx.signature;
assertEquals(bundleService.bundleTable.count(), 0);
assertEquals(await bundleService.bundleTable.count(), 0n);
const res = await bundleService.add(tx);
if ("hash" in res) {
@@ -181,17 +138,16 @@ Fixture.test(
);
// Bundle table remains empty
assertEquals(bundleService.bundleTable.count(), 0);
assertEquals(await bundleService.bundleTable.count(), 0n);
},
);
Fixture.test("adds bundle with future nonce", async (fx) => {
const bundleService = fx.createBundleService();
const bundleService = await fx.createBundleService();
const [wallet] = await fx.setupWallets(1);
const tx = wallet.sign({
nonce: (await wallet.Nonce()).add(1),
gas: 100000,
actions: [
{
ethValue: 0,
@@ -204,232 +160,11 @@ Fixture.test("adds bundle with future nonce", async (fx) => {
],
});
assertEquals(bundleService.bundleTable.count(), 0);
assertEquals(await bundleService.bundleTable.count(), 0n);
assertBundleSucceeds(await bundleService.add(tx));
assertEquals(bundleService.bundleTable.count(), 1);
});
Fixture.test("Same bundle produces same hash", async (fx) => {
const bundleService = fx.createBundleService();
const [wallet] = await fx.setupWallets(1);
const nonce = await wallet.Nonce();
const firstBundle = wallet.sign({
nonce,
gas: 100000,
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, "3"],
),
},
],
});
const secondBundle = wallet.sign({
nonce,
gas: 999999,
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, "3"],
),
},
],
});
const firstBundleHash = await bundleService.hashBundle(firstBundle);
const secondBundleHash = await bundleService.hashBundle(secondBundle);
assertEquals(firstBundleHash, secondBundleHash);
});
Fixture.test("hashes bundle with single operation", async (fx) => {
const bundleService = fx.createBundleService();
const [wallet] = await fx.setupWallets(1);
const nonce = await wallet.Nonce();
const bundle = wallet.sign({
nonce,
gas: 100000,
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, "3"],
),
},
],
});
const operationsWithZeroGas = bundle.operations.map((operation) => {
return {
...operation,
gas: BigNumber.from(0),
};
});
const bundleType = VerificationGatewayFactory.abi.find(
(entry) => "name" in entry && entry.name === "verify",
)?.inputs[0];
const validatedBundle = {
...bundle,
operations: operationsWithZeroGas,
};
const encodedBundleWithZeroSignature = ethers.utils.defaultAbiCoder.encode(
[bundleType as ExplicitAny],
[
{
...validatedBundle,
signature: [BigNumber.from(0), BigNumber.from(0)],
},
],
);
const bundleHash = ethers.utils.keccak256(encodedBundleWithZeroSignature);
const chainId = (await bundleService.ethereumService.provider.getNetwork()).chainId;
const bundleAndChainIdEncoding = ethers.utils.defaultAbiCoder.encode(
["bytes32", "uint256"],
[bundleHash, chainId],
);
const expectedBundleHash = ethers.utils.keccak256(bundleAndChainIdEncoding);
const hash = await bundleService.hashBundle(bundle);
assertEquals(hash, expectedBundleHash);
});
Fixture.test("hashes bundle with multiple operations", async (fx) => {
const bundleService = fx.createBundleService();
const [wallet] = await fx.setupWallets(1);
const nonce = await wallet.Nonce();
const bundle = fx.blsWalletSigner.aggregate([
wallet.sign({
nonce,
gas: 1_000_000,
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, 3],
),
},
],
}),
wallet.sign({
nonce: nonce.add(1),
gas: 1_000_000,
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, 5],
),
},
],
}),
]);
const operationsWithZeroGas = bundle.operations.map((operation) => {
return {
...operation,
gas: BigNumber.from(0),
};
});
const bundleType = VerificationGatewayFactory.abi.find(
(entry) => "name" in entry && entry.name === "verify",
)?.inputs[0];
const validatedBundle = {
...bundle,
operations: operationsWithZeroGas,
};
const encodedBundleWithZeroSignature = ethers.utils.defaultAbiCoder.encode(
[bundleType as ExplicitAny],
[
{
...validatedBundle,
signature: [BigNumber.from(0), BigNumber.from(0)],
},
],
);
const bundleHash = ethers.utils.keccak256(encodedBundleWithZeroSignature);
const chainId = (await bundleService.ethereumService.provider.getNetwork()).chainId;
const bundleAndChainIdEncoding = ethers.utils.defaultAbiCoder.encode(
["bytes32", "uint256"],
[bundleHash, chainId],
);
const expectedBundleHash = ethers.utils.keccak256(bundleAndChainIdEncoding);
const hash = await bundleService.hashBundle(bundle);
assertEquals(hash, expectedBundleHash);
});
Fixture.test("hashes empty bundle", async (fx) => {
const bundleService = fx.createBundleService();
const bundle = fx.blsWalletSigner.aggregate([]);
const operationsWithZeroGas = bundle.operations.map((operation) => {
return {
...operation,
gas: BigNumber.from(0),
};
});
const bundleType = VerificationGatewayFactory.abi.find(
(entry) => "name" in entry && entry.name === "verify",
)?.inputs[0];
const validatedBundle = {
...bundle,
operations: operationsWithZeroGas,
};
const encodedBundleWithZeroSignature = ethers.utils.defaultAbiCoder.encode(
[bundleType as ExplicitAny],
[
{
...validatedBundle,
signature: [BigNumber.from(0), BigNumber.from(0)],
},
],
);
const bundleHash = ethers.utils.keccak256(encodedBundleWithZeroSignature);
const chainId = (await bundleService.ethereumService.provider.getNetwork()).chainId;
const bundleAndChainIdEncoding = ethers.utils.defaultAbiCoder.encode(
["bytes32", "uint256"],
[bundleHash, chainId],
);
const expectedBundleHash = ethers.utils.keccak256(bundleAndChainIdEncoding);
const hash = await bundleService.hashBundle(bundle);
assertEquals(hash, expectedBundleHash);
assertEquals(await bundleService.bundleTable.count(), 1n);
});
// TODO (merge-ok): Add a mechanism for limiting the number of stored
@@ -437,7 +172,7 @@ Fixture.test("hashes empty bundle", async (fx) => {
// Fixture.test(
// "when future txs reach maxFutureTxs, the oldest ones are dropped",
// async (fx) => {
// const bundleService = fx.createBundleService({
// const bundleService = await fx.createBundleService({
// ...BundleService.defaultConfig,
// maxFutureTxs: 3,
// });

View File

@@ -1,6 +1,7 @@
import Range from "../src/helpers/Range.ts";
import {
assertBundleSucceeds,
assertEquals,
assertBundleSucceeds,
BigNumber,
BlsWalletWrapper,
ethers,
@@ -13,21 +14,23 @@ import Fixture, {
const oneToken = ethers.utils.parseUnits("1.0", 18);
function createBundleService(
async function createBundleService(
fx: Fixture,
feesOverride?: typeof aggregationStrategyDefaultTestConfig["fees"],
feesOverride?: Partial<typeof aggregationStrategyDefaultTestConfig["fees"]>,
) {
return fx.createBundleService(
bundleServiceDefaultTestConfig,
return await fx.createBundleService(
{
...bundleServiceDefaultTestConfig,
maxAggregationSize: 24,
},
{
...aggregationStrategyDefaultTestConfig,
maxGasPerBundle: 3000000,
fees: feesOverride ?? {
type: "token",
address: fx.testErc20.address,
allowLosses: true,
breakevenOperationCount: 4.5,
ethValueInTokens: 1300,
maxAggregationSize: 24,
fees: {
type: `token:${fx.testErc20.address}`,
perGas: BigNumber.from(10_000_000_000),
perByte: BigNumber.from(100_000_000_000_000),
...feesOverride,
},
},
);
@@ -37,7 +40,7 @@ function approveAndSendTokensToOrigin(
fx: Fixture,
nonce: BigNumber,
amount: BigNumber,
): Omit<Operation, "gas"> {
): Operation {
const es = fx.ethereumService;
return {
@@ -48,13 +51,13 @@ function approveAndSendTokensToOrigin(
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"approve",
[es.aggregatorUtilities.address, amount],
[es.utilities.address, amount],
),
},
{
ethValue: 0,
contractAddress: es.aggregatorUtilities.address,
encodedFunction: es.aggregatorUtilities.interface.encodeFunctionData(
contractAddress: es.utilities.address,
encodedFunction: es.utilities.interface.encodeFunctionData(
"sendTokenToTxOrigin",
[fx.testErc20.address, amount],
),
@@ -64,11 +67,11 @@ function approveAndSendTokensToOrigin(
}
Fixture.test("does not submit bundle with insufficient fee", async (fx) => {
const bundleService = createBundleService(fx);
const bundleService = await createBundleService(fx);
const [wallet] = await fx.setupWallets(1);
const bundle = await wallet.signWithGasEstimate({
const bundle = wallet.sign({
nonce: await wallet.Nonce(),
actions: [
{
@@ -88,7 +91,7 @@ Fixture.test("does not submit bundle with insufficient fee", async (fx) => {
await fx.testErc20.balanceOf(wallet.address),
BigNumber.from(1000),
);
assertEquals(await bundleService.bundleTable.count(), 1);
assertEquals(await bundleService.bundleTable.count(), 1n);
fx.clock.advance(5000);
await bundleService.submissionTimer.waitForCompletedSubmissions(1);
@@ -98,43 +101,35 @@ Fixture.test("does not submit bundle with insufficient fee", async (fx) => {
await fx.testErc20.balanceOf(wallet.address),
BigNumber.from(1000),
);
assertEquals(await bundleService.bundleTable.count(), 1);
assertEquals(await bundleService.bundleTable.count(), 1n);
});
Fixture.test("submits bundle with sufficient token fee", async (fx) => {
const bundleService = createBundleService(fx);
const bundleService = await createBundleService(fx);
const [wallet] = await fx.setupWallets(1, {
tokenBalance: oneToken,
});
const bundle = await wallet.signWithGasEstimate(
const bundle = wallet.sign(
approveAndSendTokensToOrigin(fx, await wallet.Nonce(), oneToken),
0.1,
);
const bundleResponse = await bundleService.add(bundle);
assertBundleSucceeds(bundleResponse);
assertBundleSucceeds(await bundleService.add(bundle));
assertEquals(
await fx.testErc20.balanceOf(wallet.address),
oneToken,
);
assertEquals(bundleService.bundleTable.count(), 1);
assertEquals(await bundleService.bundleTable.count(), 1n);
fx.clock.advance(5000);
await bundleService.submissionTimer.waitForCompletedSubmissions(1);
await bundleService.waitForConfirmations();
if ("failures" in bundleResponse) {
throw new Error("Bundle failed to be created");
}
const bundleRow = bundleService.bundleTable.findBundle(
bundleResponse.hash,
);
assertEquals(await bundleService.bundleTable.count(), 0n);
assertEquals(bundleRow?.status, "confirmed");
assertEquals(
await fx.testErc20.balanceOf(wallet.address),
BigNumber.from(0),
@@ -142,83 +137,51 @@ Fixture.test("submits bundle with sufficient token fee", async (fx) => {
});
Fixture.test("submits bundle with sufficient eth fee", async (fx) => {
const es = fx.ethereumService;
const bundleService = createBundleService(fx, {
const bundleService = await createBundleService(fx, {
type: "ether",
allowLosses: true,
breakevenOperationCount: 4.5,
perByte: BigNumber.from(1),
perGas: BigNumber.from(1),
});
const fee = BigNumber.from(2_000_000); // wei
const [wallet] = await fx.setupWallets(1, { tokenBalance: 0 });
const nonce = await wallet.Nonce();
await (await fx.adminWallet.sendTransaction({
to: wallet.address,
value: 1,
value: fee,
})).wait();
const estimation = await bundleService.aggregationStrategy.estimateFee(
await wallet.signWithGasEstimate({
nonce,
actions: [
{
ethValue: 1,
contractAddress: es.aggregatorUtilities.address,
encodedFunction: es.aggregatorUtilities.interface.encodeFunctionData(
"sendEthToTxOrigin",
),
},
],
}),
);
const es = fx.ethereumService;
assertEquals(estimation.successes, [true]);
const fee = estimation.feeRequired
.add(estimation.feeRequired.div(5)); // +20% safety margin
await (await fx.adminWallet.sendTransaction({
to: wallet.address,
value: fee
.sub(1), // Already sent 1 wei before
})).wait();
const bundle = await wallet.signWithGasEstimate({
const bundle = wallet.sign({
nonce: await wallet.Nonce(),
actions: [
{
ethValue: fee,
contractAddress: es.aggregatorUtilities.address,
encodedFunction: es.aggregatorUtilities.interface.encodeFunctionData(
contractAddress: es.utilities.address,
encodedFunction: es.utilities.interface.encodeFunctionData(
"sendEthToTxOrigin",
),
},
],
});
const bundleResponse = await bundleService.add(bundle);
assertBundleSucceeds(bundleResponse);
assertBundleSucceeds(await bundleService.add(bundle));
assertEquals(
await fx.adminWallet.provider.getBalance(wallet.address),
fee,
);
assertEquals(await bundleService.bundleTable.count(), 1);
assertEquals(await bundleService.bundleTable.count(), 1n);
fx.clock.advance(5000);
await bundleService.submissionTimer.waitForCompletedSubmissions(1);
await bundleService.waitForConfirmations();
if ("failures" in bundleResponse) {
throw new Error("Bundle failed to be created");
}
const bundleRow = await bundleService.bundleTable.findBundle(
bundleResponse.hash,
);
assertEquals(await bundleService.bundleTable.count(), 0n);
assertEquals(bundleRow?.status, "confirmed");
assertEquals(
await fx.adminWallet.provider.getBalance(wallet.address),
BigNumber.from(0),
@@ -226,77 +189,173 @@ Fixture.test("submits bundle with sufficient eth fee", async (fx) => {
});
Fixture.test("submits 9/10 bundles when 7th has insufficient fee", async (fx) => {
const breakevenOperationCount = 4.5;
const bundleService = await createBundleService(fx);
const bundleService = createBundleService(fx, {
type: "token",
address: fx.testErc20.address,
allowLosses: true,
breakevenOperationCount,
ethValueInTokens: 1,
});
const wallets = await fx.setupWallets(10, {
const [wallet1, wallet2] = await fx.setupWallets(2, {
tokenBalance: oneToken.mul(10),
});
const nonce = await wallets[0].Nonce();
const nonce1 = await wallet1.Nonce();
const nonce2 = await wallet2.Nonce();
async function addBundle(
wallet: BlsWalletWrapper,
nonce: BigNumber,
fee: BigNumber,
) {
const bundle = await wallet.signWithGasEstimate(
const bundle = wallet.sign(
approveAndSendTokensToOrigin(fx, nonce, fee),
0.1,
);
assertBundleSucceeds(await bundleService.add(bundle));
}
// For the purposes of this test, we don't want the bundleService prematurely
// running a submission on fewer bundles than we're trying to process
bundleService.config.breakevenOperationCount = Infinity;
// 6 good bundles
await addBundle(wallets[0], oneToken);
await addBundle(wallets[1], oneToken);
await addBundle(wallets[2], oneToken);
await addBundle(wallets[3], oneToken);
await addBundle(wallets[4], oneToken);
await addBundle(wallets[5], oneToken);
// 6 good bundles from wallet 1 (each pays one token)
await addBundle(wallet1, nonce1.add(0), oneToken);
await addBundle(wallet1, nonce1.add(1), oneToken);
await addBundle(wallet1, nonce1.add(2), oneToken);
await addBundle(wallet1, nonce1.add(3), oneToken);
await addBundle(wallet1, nonce1.add(4), oneToken);
await addBundle(wallet1, nonce1.add(5), oneToken);
// 7th bundle should fail because 1 wei is an insufficient fee
await addBundle(wallets[6], BigNumber.from(1));
await addBundle(wallet1, nonce1.add(6), BigNumber.from(1));
// 3 more good bundles
await addBundle(wallets[7], oneToken);
await addBundle(wallets[8], oneToken);
await addBundle(wallets[9], oneToken);
// 3 more good bundles. These are from a different wallet so that the nonces
// can be correct independent of the success/failure of bundle #7 above.
await addBundle(wallet2, nonce2.add(0), oneToken);
await addBundle(wallet2, nonce2.add(1), oneToken);
await addBundle(wallet2, nonce2.add(2), oneToken);
// Restore this value now that all the bundles are added together
bundleService.config.breakevenOperationCount = breakevenOperationCount;
assertEquals(bundleService.bundleTable.count(), 10);
assertEquals(await bundleService.bundleTable.count(), 10n);
fx.clock.advance(5000);
await bundleService.submissionTimer.waitForCompletedSubmissions(1);
await bundleService.waitForConfirmations();
const remainingBundles = fx.allBundles(bundleService);
const remainingPendingBundles = remainingBundles
.filter((bundle) => bundle.status === "pending");
assertEquals(await bundleService.bundleTable.count(), 1n);
assertEquals(remainingBundles.length, 10);
assertEquals(remainingPendingBundles.length, 1);
assertEquals(
await fx.testErc20.balanceOf(wallet1.address),
oneToken.mul(4), // 6 tokens spent from wallet 1
);
await Promise.all(wallets.map((wallet, i) =>
(async () => {
assertEquals(
await fx.testErc20.balanceOf(wallet.address),
// Every wallet should have successfully spent one token, except the 7th
i === 6 ? oneToken.mul(10) : oneToken.mul(9),
);
})()
));
assertEquals(
await fx.testErc20.balanceOf(wallet2.address),
oneToken.mul(7), // 3 tokens spent from wallet 2
);
});
Fixture.test("submits 9/10 bundles when 7th has insufficient gas-based fee", async (fx) => {
const bundleService = await createBundleService(fx, {
// This test is targeting the logic which needs to run when the
// calldata-based gas shortcut doesn't work. We just set the per byte fee to
// zero to make that clear.
perByte: BigNumber.from(0),
});
const baseFee = BigNumber.from(1_000_000).mul(1e9); // Note 1
const fee = BigNumber.from(1_950_000).mul(1e9);
const [wallet1, wallet2] = await fx.setupWallets(2, {
tokenBalance: fee.mul(10),
});
const nonce1 = await wallet1.Nonce();
const nonce2 = await wallet2.Nonce();
async function addBundle(
wallet: BlsWalletWrapper,
nonce: BigNumber,
fee: BigNumber,
) {
const bundle = wallet.sign(
approveAndSendTokensToOrigin(fx, nonce, fee),
);
assertBundleSucceeds(await bundleService.add(bundle));
}
// 6 good bundles from wallet 1 (each pays the required fee)
await addBundle(wallet1, nonce1.add(0), fee.add(baseFee)); // Note 1
await addBundle(wallet1, nonce1.add(1), fee);
await addBundle(wallet1, nonce1.add(2), fee);
await addBundle(wallet1, nonce1.add(3), fee);
await addBundle(wallet1, nonce1.add(4), fee);
await addBundle(wallet1, nonce1.add(5), fee);
// Note 1: The first bundle has a base fee added because there's an overhead
// of doing a bundle. This is a bit unrealistic but it makes the test less
// brittle.
// 7th bundle should fail because 1 wei is an insufficient fee
await addBundle(wallet1, nonce1.add(6), BigNumber.from(1));
// 3 more good bundles. These are from a different wallet so that the nonces
// can be correct independent of the success/failure of bundle #7 above.
await addBundle(wallet2, nonce2.add(0), fee);
await addBundle(wallet2, nonce2.add(1), fee);
await addBundle(wallet2, nonce2.add(2), fee);
assertEquals(await bundleService.bundleTable.count(), 10n);
fx.clock.advance(5000);
await bundleService.submissionTimer.waitForCompletedSubmissions(1);
await bundleService.waitForConfirmations();
assertEquals(await bundleService.bundleTable.count(), 1n);
assertEquals(
await fx.testErc20.balanceOf(wallet1.address),
fee.mul(4).sub(baseFee), // 6 fees spent from wallet 1
);
assertEquals(
await fx.testErc20.balanceOf(wallet2.address),
fee.mul(7), // 3 fees spent from wallet 2
);
});
Fixture.test("submits 1/3 bundles when bundle#3 fails the shortcut fee test but bundle#2 also fails the full fee test", async (fx) => {
const bundleService = await createBundleService(fx, {
perGas: BigNumber.from(100_000_000_000),
});
const [wallet] = await fx.setupWallets(2, {
tokenBalance: oneToken.mul(10),
});
const nonce = await wallet.Nonce();
const bundleFees = [
// Passes
BigNumber.from(140_000_000).mul(1e9),
// Passes shortcut test but fails full test
BigNumber.from(80_000_000).mul(1e9),
// Fails shortcut test
BigNumber.from(1),
];
for (const i of Range(bundleFees.length)) {
const bundle = wallet.sign(
approveAndSendTokensToOrigin(fx, nonce.add(i), bundleFees[i]),
);
assertBundleSucceeds(await bundleService.add(bundle));
}
assertEquals(await bundleService.bundleTable.count(), 3n);
fx.clock.advance(5000);
await bundleService.submissionTimer.waitForCompletedSubmissions(1);
await bundleService.waitForConfirmations();
assertEquals(await bundleService.bundleTable.count(), 2n);
assertEquals(
await fx.testErc20.balanceOf(wallet.address),
oneToken.mul(10).sub(bundleFees[0]),
);
});
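The perGas/perByte config used throughout these tests suggests a required fee of roughly perGas times marginal gas plus perByte times calldata bytes; the exact formula lives in AggregationStrategy and isn't shown in this diff, so the following is only a rough sketch with illustrative numbers:

import { BigNumber } from "./deps.ts";

const perGas = BigNumber.from(10_000_000_000);
const perByte = BigNumber.from(100_000_000_000_000);

const marginalGas = 120_000; // illustrative gas attributed to one bundle
const calldataBytes = 600;   // illustrative serialized bundle size

const approxFeeRequired = perGas.mul(marginalGas).add(perByte.mul(calldataBytes));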

View File

@@ -1,29 +1,30 @@
import { assertBundleSucceeds, assertEquals, BigNumber } from "./deps.ts";
import Fixture, { bundleServiceDefaultTestConfig } from "./helpers/Fixture.ts";
import { assertEquals, assertBundleSucceeds, BigNumber } from "./deps.ts";
import Fixture, {
aggregationStrategyDefaultTestConfig,
bundleServiceDefaultTestConfig,
} from "./helpers/Fixture.ts";
import Range from "../src/helpers/Range.ts";
import { AggregationStrategyConfig } from "../src/app/AggregationStrategy.ts";
import nil from "../src/helpers/nil.ts";
const bundleServiceConfig = {
...bundleServiceDefaultTestConfig,
maxAggregationSize: 5,
maxAggregationDelayMillis: 5000,
};
const aggregationStrategyConfig: AggregationStrategyConfig = {
maxGasPerBundle: 1_000_000,
fees: nil,
bundleCheckingConcurrency: 8,
const aggregationStrategyConfig = {
...aggregationStrategyDefaultTestConfig,
maxAggregationSize: 5,
};
Fixture.test("submits a single action in a timed submission", async (fx) => {
const bundleService = fx.createBundleService(
const bundleService = await fx.createBundleService(
bundleServiceConfig,
aggregationStrategyConfig,
);
const [wallet] = await fx.setupWallets(1);
const bundle = await wallet.signWithGasEstimate({
const bundle = wallet.sign({
nonce: await wallet.Nonce(),
actions: [
{
@@ -37,14 +38,13 @@ Fixture.test("submits a single action in a timed submission", async (fx) => {
],
});
const bundleResponse = await bundleService.add(bundle);
assertBundleSucceeds(bundleResponse);
assertBundleSucceeds(await bundleService.add(bundle));
assertEquals(
await fx.testErc20.balanceOf(wallet.address),
BigNumber.from(1000),
);
assertEquals(await bundleService.bundleTable.count(), 1);
assertEquals(await bundleService.bundleTable.count(), 1n);
fx.clock.advance(5000);
await bundleService.submissionTimer.waitForCompletedSubmissions(1);
@@ -54,46 +54,32 @@ Fixture.test("submits a single action in a timed submission", async (fx) => {
await fx.testErc20.balanceOf(wallet.address),
BigNumber.from(1001),
);
assertEquals(await bundleService.bundleTable.count(), 1);
if ("failures" in bundleResponse) {
throw new Error("Bundle failed to be created");
}
const bundleRow = await bundleService.bundleTable.findBundle(
bundleResponse.hash,
);
assertEquals(bundleRow?.status, "confirmed");
const bundleReceipt = bundleService.receiptFromBundle(bundleRow!);
assertEquals(bundleReceipt?.bundleHash, bundleResponse.hash);
assertEquals(await bundleService.bundleTable.count(), 0n);
});
Fixture.test("submits a full submission without delay", async (fx) => {
const bundleService = fx.createBundleService(
const bundleService = await fx.createBundleService(
bundleServiceConfig,
aggregationStrategyConfig,
);
const wallets = await fx.setupWallets(5);
const firstWallet = wallets[0];
const nonce = await firstWallet.Nonce();
const [wallet] = await fx.setupWallets(1);
const walletNonce = await wallet.Nonce();
const bundles = await Promise.all(
wallets.map((wallet) =>
wallet.signWithGasEstimate({
nonce,
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[firstWallet.address, 1],
),
},
],
})
),
const bundles = Range(5).map((i) =>
wallet.sign({
nonce: walletNonce.add(i),
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, 1],
),
},
],
})
);
for (const b of bundles) {
@@ -106,43 +92,43 @@ Fixture.test("submits a full submission without delay", async (fx) => {
// Check mints have occurred, ensuring a submission has occurred even though
// the clock has not advanced
assertEquals(
await fx.testErc20.balanceOf(firstWallet.address),
await fx.testErc20.balanceOf(wallet.address),
BigNumber.from(1005), // 1000 (initial) + 5 * 1 (mint txs)
);
});
Fixture.test(
"submits multiple aggregations when provided with too many user bundles",
[
"submits submission from over-full bundle table without delay and submits",
"leftover bundles after delay",
].join(" "),
async (fx) => {
const bundleService = fx.createBundleService(
const bundleService = await fx.createBundleService(
bundleServiceConfig,
aggregationStrategyConfig,
);
const wallets = await fx.setupWallets(7);
const firstWallet = wallets[0];
const nonce = await firstWallet.Nonce();
const [wallet] = await fx.setupWallets(1);
const walletNonce = await wallet.Nonce();
const bundles = await Promise.all(
wallets.map((wallet) =>
wallet.signWithGasEstimate({
nonce,
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[firstWallet.address, 1],
),
},
],
})
),
const bundles = Range(7).map((i) =>
wallet.sign({
nonce: walletNonce.add(i),
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, 1],
),
},
],
})
);
// Prevent submission from triggering on max aggregation size.
bundleService.config.breakevenOperationCount = Infinity;
bundleService.config.maxAggregationSize = Infinity;
for (const b of bundles) {
assertBundleSucceeds(await bundleService.add(b));
@@ -151,34 +137,36 @@ Fixture.test(
// Restore max aggregation size for testing. (This way we hit the edge case
// that the aggregator has access to more actions than it can fit into a
// single submission, which happens but is race-dependent.)
bundleService.config.breakevenOperationCount = 4.5;
bundleService.config.maxAggregationSize = 5;
await bundleService.submissionTimer.trigger();
await bundleService.waitForConfirmations();
if ((fx.allBundles(bundleService)).length > 0) {
await bundleService.submissionTimer.trigger();
await bundleService.waitForConfirmations();
}
// Check mints have occurred
// Check mints have occurred, ensuring a submission has occurred even though the
// clock has not advanced
assertEquals(
await fx.testErc20.balanceOf(firstWallet.address),
await fx.testErc20.balanceOf(wallet.address),
BigNumber.from(1005), // 1000 (initial) + 5 * 1 (mint txs)
);
// Leftover txs
const remainingBundles = await fx.allBundles(bundleService);
assertEquals(remainingBundles.length, 2);
await bundleService.submissionTimer.trigger();
await bundleService.waitForConfirmations();
assertEquals(
await fx.testErc20.balanceOf(wallet.address),
BigNumber.from(1007), // 1000 (initial) + 7 * 1 (mint txs)
);
const confirmationEvents = fx.appEvents.filter((ev) =>
ev.type === "submission-confirmed"
);
assertEquals(confirmationEvents.length, 2);
},
);
Fixture.test(
"submits 3 bundles in reverse (incorrect) nonce order",
async (fx) => {
const bundleService = fx.createBundleService(
const bundleService = await fx.createBundleService(
bundleServiceConfig,
aggregationStrategyConfig,
);
@@ -189,7 +177,6 @@ Fixture.test(
const bundles = Range(3).reverse().map((i) =>
wallet.sign({
nonce: walletNonce.add(i),
gas: 1_000_000,
actions: [
{
ethValue: 0,
@@ -218,11 +205,8 @@ Fixture.test(
);
assertEquals(await wallet.Nonce(), BigNumber.from(2));
// 2 mints should be left as both failed submission pre-check
let remainingBundles = fx.allBundles(bundleService);
let remainingPendingBundles = remainingBundles.filter((bundle) =>
bundle.status === "pending"
);
assertEquals(remainingPendingBundles.length, 2);
let remainingBundles = await fx.allBundles(bundleService);
assertEquals(remainingBundles.length, 2);
// Re-run submissions
await bundleService.submissionTimer.trigger();
@@ -236,11 +220,8 @@ Fixture.test(
);
assertEquals(await wallet.Nonce(), BigNumber.from(3));
// 1 mint (nonce 3) should be left as it failed submission pre-check
remainingBundles = fx.allBundles(bundleService);
remainingPendingBundles = remainingBundles.filter((bundle) =>
bundle.status === "pending"
);
assertEquals(remainingPendingBundles.length, 1);
remainingBundles = await fx.allBundles(bundleService);
assertEquals(remainingBundles.length, 1);
// Simulate 1 block being mined
await fx.mine(1);
@@ -256,16 +237,13 @@ Fixture.test(
BigNumber.from(1003), // 1000 (initial) + 3 * 1 (mint txs)
);
assertEquals(await wallet.Nonce(), BigNumber.from(4));
remainingBundles = fx.allBundles(bundleService);
remainingPendingBundles = remainingBundles.filter((bundle) =>
bundle.status === "pending"
);
assertEquals(remainingPendingBundles.length, 0);
remainingBundles = await fx.allBundles(bundleService);
assertEquals(remainingBundles.length, 0);
},
);
Fixture.test("retains failing bundle when its eligibility delay is smaller than MAX_ELIGIBILITY_DELAY", async (fx) => {
const bundleService = fx.createBundleService(
const bundleService = await fx.createBundleService(
{
...bundleServiceConfig,
maxEligibilityDelay: 300,
@@ -278,7 +256,6 @@ Fixture.test("retains failing bundle when its eligibility delay is smaller than
const bundle = wallet.sign({
// Future nonce makes this a failing bundle
nonce: (await wallet.Nonce()).add(1),
gas: 1_000_000,
actions: [
{
ethValue: 0,
@@ -297,16 +274,16 @@ Fixture.test("retains failing bundle when its eligibility delay is smaller than
await bundleService.runPendingTasks();
assertBundleSucceeds(res);
assertEquals(await bundleService.bundleTable.count(), 1);
assertEquals(await bundleService.bundleTable.count(), 1n);
fx.clock.advance(5000);
await bundleService.submissionTimer.waitForCompletedSubmissions(1);
assertEquals(await bundleService.bundleTable.count(), 1);
assertEquals(await bundleService.bundleTable.count(), 1n);
});
Fixture.test("updates status of failing bundle when its eligibility delay is larger than MAX_ELIGIBILITY_DELAY", async (fx) => {
const bundleService = fx.createBundleService(
Fixture.test("removes failing bundle when its eligibility delay is larger than MAX_ELIGIBILITY_DELAY", async (fx) => {
const bundleService = await fx.createBundleService(
{
...bundleServiceConfig,
maxEligibilityDelay: 300,
@@ -319,7 +296,6 @@ Fixture.test("updates status of failing bundle when its eligibility delay is lar
const bundle = wallet.sign({
// Future nonce makes this a failing bundle
nonce: (await wallet.Nonce()).add(1),
gas: 1_000_000,
actions: [
{
ethValue: 0,
@@ -338,7 +314,7 @@ Fixture.test("updates status of failing bundle when its eligibility delay is lar
await bundleService.runPendingTasks();
assertBundleSucceeds(res);
assertEquals(await bundleService.bundleTable.count(), 1);
assertEquals(await bundleService.bundleTable.count(), 1n);
const [bundleRow] = await bundleService.bundleTable.all();
@@ -350,65 +326,5 @@ Fixture.test("updates status of failing bundle when its eligibility delay is lar
fx.clock.advance(5000);
await bundleService.submissionTimer.waitForCompletedSubmissions(1);
assertEquals(await bundleService.bundleTable.count(), 1);
if ("failures" in res) {
throw new Error("Bundle failed to be created");
}
const failedBundleRow = await bundleService.bundleTable.findBundle(res.hash);
assertEquals(failedBundleRow?.status, "failed");
assertEquals(await bundleService.bundleTable.count(), 0n);
});
Fixture.test("Retrieves all sub bundles included in a submitted bundle from single a sub bundle", async (fx) => {
const bundleService = fx.createBundleService(
bundleServiceConfig,
aggregationStrategyConfig,
);
const wallets = await fx.setupWallets(3);
const firstWallet = wallets[0];
const nonce = await firstWallet.Nonce();
const bundles = await Promise.all(
wallets.map((wallet) =>
wallet.signWithGasEstimate({
nonce,
actions: [
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[firstWallet.address, 1],
),
},
],
})
),
);
const subBundleHashes = await Promise.all(bundles.map(async (bundle) => {
const res = await bundleService.add(bundle);
if ("failures" in res) {
throw new Error("Bundle failed to be created");
}
return res.hash;
}));
await bundleService.submissionTimer.trigger();
await bundleService.waitForConfirmations();
const firstSubBundle = bundleService.lookupBundle(subBundleHashes[0]);
const secondSubBundle = bundleService.lookupBundle(subBundleHashes[1]);
const thirdSubBundle = bundleService.lookupBundle(subBundleHashes[2]);
const orderedSubBundles = [firstSubBundle, secondSubBundle, thirdSubBundle].sort((a, b) => a!.id - b!.id);
for (const subBundleHash of subBundleHashes) {
const aggregateBundle = bundleService.lookupAggregateBundle(subBundleHash);
assertEquals(aggregateBundle?.[0], orderedSubBundles[0]);
assertEquals(aggregateBundle?.[1], orderedSubBundles[1]);
assertEquals(aggregateBundle?.[2], orderedSubBundles[2]);
}
});

View File

@@ -1,19 +1,43 @@
import { assertEquals, BigNumber, sqlite } from "./deps.ts";
import { assertEquals, BigNumber } from "./deps.ts";
import BundleTable, { BundleRow } from "../src/app/BundleTable.ts";
import nil from "../src/helpers/nil.ts";
import createQueryClient from "../src/app/createQueryClient.ts";
let counter = 0;
function test(name: string, fn: (bundleTable: BundleTable) => Promise<void>) {
Deno.test({
name,
sanitizeResources: false,
fn: async () => {
const tableName = `bundles_test_${counter++}_${Date.now()}`;
const queryClient = createQueryClient(() => {});
const table = await BundleTable.create(queryClient, tableName);
try {
await fn(table);
} finally {
try {
await table.drop();
await queryClient.disconnect();
} catch (error) {
console.error("cleanup error:", error);
}
}
},
});
}
const sampleRows: BundleRow[] = [
{
id: 1,
id: 0,
hash: "0x0",
status: "pending",
bundle: {
senderPublicKeys: [["0x01", "0x02", "0x03", "0x04"]],
operations: [
{
nonce: "0x01",
gas: "0x01",
actions: [
{
ethValue: "0x00",
@@ -27,25 +51,21 @@ const sampleRows: BundleRow[] = [
},
eligibleAfter: BigNumber.from(0),
nextEligibilityDelay: BigNumber.from(1),
submitError: nil,
receipt: nil,
aggregateHash: nil,
},
];
Deno.test("Starts with zero transactions", () => {
const table = new BundleTable(new sqlite.DB());
assertEquals(table.count(), 0);
test("Starts with zero transactions", async (table) => {
assertEquals(await table.count(), 0n);
});
Deno.test("Has one transaction after adding transaction", () => {
const table = new BundleTable(new sqlite.DB());
table.add(sampleRows[0]);
assertEquals(table.count(), 1);
test("Has one transaction after adding transaction", async (table) => {
await table.add(sampleRows[0]);
assertEquals(await table.count(), 1n);
});
Deno.test("Can retrieve transaction", () => {
const table = new BundleTable(new sqlite.DB());
table.add(sampleRows[0]);
assertEquals(table.all(), [{ ...sampleRows[0] }]);
test("Can retrieve transaction", async (table) => {
await table.add(sampleRows[0]);
assertEquals(await table.all(), [{ ...sampleRows[0] }]);
});

View File

@@ -8,7 +8,7 @@ Fixture.test("EthereumService submits mint action", async (fx) => {
const [wallet] = await fx.setupWallets(1);
const startBalance = await fx.testErc20.balanceOf(wallet.address);
const bundle = await wallet.signWithGasEstimate({
const bundle = wallet.sign({
nonce: await wallet.Nonce(),
actions: [
{
@@ -32,7 +32,7 @@ Fixture.test("EthereumService submits mint action", async (fx) => {
Fixture.test("EthereumService submits transfer action", async (fx) => {
const wallets = await fx.setupWallets(2);
const bundle = await wallets[0].signWithGasEstimate({
const bundle = wallets[0].sign({
nonce: await wallets[0].Nonce(),
actions: [
{
@@ -62,7 +62,6 @@ Fixture.test("EthereumService submits aggregated bundle", async (fx) => {
const bundle = fx.blsWalletSigner.aggregate([
wallet.sign({
nonce: walletNonce,
gas: 1_000_000,
actions: [
{
ethValue: 0,
@@ -76,7 +75,6 @@ Fixture.test("EthereumService submits aggregated bundle", async (fx) => {
}),
wallet.sign({
nonce: walletNonce.add(1),
gas: 1_000_000,
actions: [
{
ethValue: 0,
@@ -104,24 +102,21 @@ Fixture.test("EthereumService submits large aggregate mint bundle", async (fx) =
const size = 11;
const bundle = fx.blsWalletSigner.aggregate(
await Promise.all(
Range(size).map((i) =>
wallet.sign({
nonce: walletNonce.add(i),
gas: 1_000_000,
actions: [
// TODO (merge-ok): Add single operation multi-action variation of this test
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, 1],
),
},
],
})
),
Range(size).map((i) =>
wallet.sign({
nonce: walletNonce.add(i),
actions: [
// TODO (merge-ok): Add single operation multi-action variation of this test
{
ethValue: 0,
contractAddress: fx.testErc20.address,
encodedFunction: fx.testErc20.interface.encodeFunctionData(
"mint",
[wallet.address, 1],
),
},
],
})
),
);
@@ -142,7 +137,6 @@ Fixture.test("EthereumService sends large aggregate transfer bundle", async (fx)
Range(size).map((i) =>
sendWallet.sign({
nonce: sendWalletNonce.add(i),
gas: 1_000_000,
actions: [
{
ethValue: 0,
@@ -176,7 +170,6 @@ Fixture.test(
Range(5).map((i) =>
wallet.sign({
nonce: walletNonce.add(i),
gas: 1_000_000,
actions: [
{
ethValue: 0,
@@ -245,7 +238,7 @@ Fixture.test("callStaticSequence - correctly measures transfer", async (fx) => {
value: transferAmount,
})).wait();
const bundle = await sendWallet.signWithGasEstimate({
const bundle = sendWallet.sign({
nonce: await sendWallet.Nonce(),
actions: [
{
@@ -260,9 +253,9 @@ Fixture.test("callStaticSequence - correctly measures transfer", async (fx) => {
const es = fx.ethereumService;
const results = await es.callStaticSequence(
es.Call(es.aggregatorUtilities, "ethBalanceOf", [recvWallet.address]),
es.Call(es.utilities, "ethBalanceOf", [recvWallet.address]),
es.Call(es.verificationGateway, "processBundle", [bundle]),
es.Call(es.aggregatorUtilities, "ethBalanceOf", [recvWallet.address]),
es.Call(es.utilities, "ethBalanceOf", [recvWallet.address]),
);
const [balanceResultBefore, , balanceResultAfter] = results;
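
Continuing the callStaticSequence hunk above, a hedged sketch of how the before and after balance results could be compared; the returnValue field and its decoding are assumptions about the shape of a static-call result and are not taken from this diff (BigNumber and transferAmount are assumed to be in scope from the surrounding test).

// Hypothetical continuation of the test -- the decoded result shape is assumed.
const balanceBefore = BigNumber.from(balanceResultBefore.returnValue[0]);
const balanceAfter = BigNumber.from(balanceResultAfter.returnValue[0]);

// The receiving wallet should end up richer by exactly the transferred amount.
assertEquals(balanceAfter.sub(balanceBefore).eq(transferAmount), true);
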

View File

@@ -1,10 +0,0 @@
import { assertEquals } from "./deps.ts";
import Fixture from "./helpers/Fixture.ts";
Fixture.test("HealthService returns healthy", async (fx) => {
const healthCheckService = fx.createHealthCheckService()
const healthStatus = await healthCheckService.getHealth();
const expected = {"status":"healthy"};
assertEquals(JSON.stringify(healthStatus), JSON.stringify(expected));
});

View File

@@ -15,7 +15,7 @@ export function assertEquals<L, R extends L>(left: L, right: R) {
export function assertBundleSucceeds(res: AddBundleResponse) {
if ("failures" in res) {
throw new AssertionError(`expected bundle to succeed. failures: ${JSON.stringify(res.failures)}`);
throw new AssertionError("expected bundle to succeed");
}
}
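
For context on the assertion helper above, a short hedged usage sketch; bundleService, bundle, and the add method name stand in for setup done by the surrounding fixtures and are assumptions, not code from this commit.

// Hypothetical usage -- bundleService and bundle come from the test fixture;
// the add method name is an assumption about BundleService.
const res = await bundleService.add(bundle);

// Throws an AssertionError (including the serialized failures in the richer variant)
// if the aggregator rejected the bundle.
assertBundleSucceeds(res);
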

View File

@@ -5,13 +5,14 @@ import {
BlsWalletWrapper,
ethers,
MockERC20,
MockERC20Factory,
MockERC20__factory,
NetworkConfig,
sqlite,
QueryClient,
} from "../../deps.ts";
import testRng from "./testRng.ts";
import EthereumService from "../../src/app/EthereumService.ts";
import createQueryClient from "../../src/app/createQueryClient.ts";
import Range from "../../src/helpers/Range.ts";
import Mutex from "../../src/helpers/Mutex.ts";
import TestClock from "./TestClock.ts";
@@ -22,33 +23,31 @@ import nil, { isNotNil } from "../../src/helpers/nil.ts";
import getNetworkConfig from "../../src/helpers/getNetworkConfig.ts";
import BundleService from "../../src/app/BundleService.ts";
import BundleTable, { BundleRow } from "../../src/app/BundleTable.ts";
import AggregationStrategy, {
AggregationStrategyConfig,
} from "../../src/app/AggregationStrategy.ts";
import HealthService from "../../src/app/HealthService.ts";
import AggregationStrategy from "../../src/app/AggregationStrategy.ts";
// deno-lint-ignore no-explicit-any
type ExplicitAny = any;
let existingClient: QueryClient | nil = nil;
export const bundleServiceDefaultTestConfig:
typeof BundleService.defaultConfig = {
bundleQueryLimit: 100,
breakevenOperationCount: 4.5,
maxAggregationSize: 12,
maxAggregationDelayMillis: 5000,
maxUnconfirmedAggregations: 3,
maxEligibilityDelay: 300,
isOptimism: false,
};
export const aggregationStrategyDefaultTestConfig: AggregationStrategyConfig = {
maxGasPerBundle: 1500000,
fees: {
type: "ether",
allowLosses: true,
breakevenOperationCount: 4.5,
},
bundleCheckingConcurrency: 8,
};
export const aggregationStrategyDefaultTestConfig:
typeof AggregationStrategy.defaultConfig = {
maxAggregationSize: 12,
fees: {
type: "ether",
perGas: BigNumber.from(0),
perByte: BigNumber.from(0),
},
};
export default class Fixture {
static test(
@@ -76,10 +75,11 @@ export default class Fixture {
static async create(testName: string): Promise<Fixture> {
const netCfg = await getNetworkConfig();
const rng = testRng.seed(testName);
const emit = (evt: AppEvent) => fx.emit(evt);
const ethereumService = await EthereumService.create(
emit,
(evt) => fx.emit(evt),
netCfg.addresses.verificationGateway,
netCfg.addresses.utilities,
env.PRIVATE_KEY_AGG,
);
@@ -96,7 +96,6 @@ export default class Fixture {
ethereumService.blsWalletSigner,
ethereumService,
aggregationStrategyDefaultTestConfig,
emit,
),
netCfg,
);
@@ -133,7 +132,7 @@ export default class Fixture {
public aggregationStrategy: AggregationStrategy,
public networkConfig: NetworkConfig,
) {
this.testErc20 = MockERC20Factory.connect(
this.testErc20 = MockERC20__factory.connect(
this.networkConfig.addresses.testToken,
this.ethereumService.wallet.provider,
);
@@ -148,37 +147,33 @@ export default class Fixture {
return this.rng.seed("blsPrivateKey", ...extraSeeds).address();
}
createBundleService(
async createBundleService(
config = bundleServiceDefaultTestConfig,
aggregationStrategyConfig = aggregationStrategyDefaultTestConfig,
) {
const suffix = this.rng.seed("table-name-suffix").address().slice(2, 12);
existingClient = createQueryClient(this.emit, existingClient);
const queryClient = existingClient;
const tablesMutex = new Mutex();
const table = new BundleTable(
new sqlite.DB(),
(sql, params) => {
if (env.LOG_QUERIES) {
this.emit({
type: "db-query",
data: { sql, params },
});
}
},
);
const tableName = `bundles_test_${suffix}`;
const table = await BundleTable.createFresh(queryClient, tableName);
const aggregationStrategy =
const aggregationStrategy = (
aggregationStrategyConfig === aggregationStrategyDefaultTestConfig
? this.aggregationStrategy
: new AggregationStrategy(
this.blsWalletSigner,
this.ethereumService,
aggregationStrategyConfig,
this.emit,
);
)
);
const bundleService = new BundleService(
this.emit,
this.clock,
queryClient,
tablesMutex,
table,
this.blsWalletSigner,
@@ -196,18 +191,16 @@ export default class Fixture {
}
async mine(numBlocks: number): Promise<void> {
const provider = this.ethereumService.wallet
.provider as ethers.providers.JsonRpcProvider;
for (let i = 0; i < numBlocks; i++) {
// Sending 0 eth instead of using evm_mine since geth doesn't support it.
await (await this.adminWallet.sendTransaction({
to: this.adminWallet.address,
value: 0,
})).wait();
await provider.send("evm_mine", []);
}
}
allBundles(
bundleService: BundleService,
): BundleRow[] {
): Promise<BundleRow[]> {
return bundleService.bundleTable.all();
}
@@ -248,7 +241,7 @@ export default class Fixture {
const topUp = BigNumber.from(tokenBalance).sub(balance);
if (topUp.gt(0)) {
return await wallet.signWithGasEstimate({
return wallet.sign({
nonce: (await wallet.Nonce()).add(i),
actions: [
{
@@ -264,7 +257,7 @@ export default class Fixture {
}
if (topUp.lt(0)) {
return await wallet.signWithGasEstimate({
return wallet.sign({
nonce: (await wallet.Nonce()).add(i),
actions: [
{
@@ -296,12 +289,6 @@ export default class Fixture {
return wallets;
}
createHealthCheckService() {
const healthCheckService = new HealthService();
return healthCheckService;
}
async cleanup() {
for (const job of this.cleanupJobs) {
await job();
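
To round out the Fixture API shown above, a hedged sketch of a test that overrides the default aggregation-strategy config via createBundleService; the fee values are invented for illustration, and the config shape matches the perGas/perByte variant from the diff above, so adjust to whichever side of the change is current.

// Hypothetical usage inside a test file -- fee values are made up for illustration.
Fixture.test("uses a custom aggregation strategy config", async (fx) => {
  const bundleService = await fx.createBundleService(
    bundleServiceDefaultTestConfig,
    {
      ...aggregationStrategyDefaultTestConfig,
      fees: {
        type: "ether",
        perGas: BigNumber.from(1),
        perByte: BigNumber.from(0),
      },
    },
  );

  // A fresh service should start with an empty bundle table.
  assertEquals(await fx.allBundles(bundleService), []);
});
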

View File

@@ -29,7 +29,6 @@ Deno.test("parseBundleDto accepts dummy values", () => {
"operations": [
{
"nonce": "0x01",
"gas": "0x01",
"actions": [
{
"ethValue": "0x00",

Some files were not shown because too many files have changed in this diff.