Lodestar prover for execution api (#5222)

* Add package skeleton

* Add implementation for the verification

* Update package versions

* Update the provider structure to store full execution payload

* Add a test script

* Split the utils to scoped files

* Add multiple web3 providers

* Add a proxy factory method

* Add the CLI for the prover proxy

* Rename few functions to make those consistent

* Add some required unit tests

* Add unit tests

* Add e2e tests

* Fix duplicate Buffer error

* Fix lint error

* Fix lint errors

* Update the lightclient to sync in background

* Validate the execution payload

* Update initWithRest to init

* Update the max limit for the payloads to not cross finalized slot

* Remove the usage for finalizedPayloadHeaders tracking

* Rename update to lcHeader

* Update the code as per feedback

* Fix readme

* Update the payload store logic

* Add the cleanup logic to payload store

* Update the code as per feedback

* Fix few types in the tests

* Move the usage to isForkWithdrawls

* Fix a unit test
This commit is contained in:
Nazar Hussain
2023-03-26 15:28:07 +02:00
committed by GitHub
parent 6d3ec2b5ec
commit 358b9e0410
51 changed files with 3960 additions and 60 deletions

View File

@@ -1,13 +1,16 @@
import {allForks} from "@lodestar/types";
import {RunStatusCode} from "./index.js";
export enum LightclientEvent {
lightClientOptimisticUpdate = "light_client_optimistic_update",
lightClientFinalityUpdate = "light_client_finality_update",
lightClientOptimisticHeader = "light_client_optimistic_header",
lightClientFinalityHeader = "light_client_finality_header",
statusChange = "light_client_status_change",
}
export type LightclientEmitterEvents = {
[LightclientEvent.lightClientOptimisticUpdate]: (newHeader: allForks.LightClientHeader) => void;
[LightclientEvent.lightClientFinalityUpdate]: (newHeader: allForks.LightClientHeader) => void;
[LightclientEvent.lightClientOptimisticHeader]: (newHeader: allForks.LightClientHeader) => void;
[LightclientEvent.lightClientFinalityHeader]: (newHeader: allForks.LightClientHeader) => void;
[LightclientEvent.statusChange]: (code: RunStatusCode) => void;
};
export type LightclientEmitter = MittEmitter<LightclientEmitterEvents>;

View File

@@ -49,12 +49,14 @@ const ON_ERROR_RETRY_MS = 1000;
// TODO: Customize with option
const ALLOW_FORCED_UPDATES = true;
enum RunStatusCode {
export enum RunStatusCode {
uninitialized,
started,
syncing,
stopped,
}
type RunStatus =
| {code: RunStatusCode.uninitialized}
| {code: RunStatusCode.started; controller: AbortController}
| {code: RunStatusCode.syncing}
| {code: RunStatusCode.stopped};
@@ -104,7 +106,7 @@ export class Lightclient {
private readonly lightclientSpec: LightclientSpec;
private status: RunStatus = {code: RunStatusCode.stopped};
private runStatus: RunStatus = {code: RunStatusCode.stopped};
constructor({config, logger, genesisData, bootstrap, transport}: LightclientInitArgs) {
this.genesisTime = genesisData.genesisTime;
@@ -116,17 +118,18 @@ export class Lightclient {
this.config = createBeaconConfig(config, this.genesisValidatorsRoot);
this.logger = logger ?? getLcLoggerConsole();
this.transport = transport;
this.runStatus = {code: RunStatusCode.uninitialized};
this.lightclientSpec = new LightclientSpec(
this.config,
{
allowForcedUpdates: ALLOW_FORCED_UPDATES,
onSetFinalizedHeader: (header) => {
this.emitter.emit(LightclientEvent.lightClientFinalityUpdate, header);
this.emitter.emit(LightclientEvent.lightClientFinalityHeader, header);
this.logger.debug("Updated state.finalizedHeader", {slot: header.beacon.slot});
},
onSetOptimisticHeader: (header) => {
this.emitter.emit(LightclientEvent.lightClientOptimisticUpdate, header);
this.emitter.emit(LightclientEvent.lightClientOptimisticHeader, header);
this.logger.debug("Updated state.optimisticHeader", {slot: header.beacon.slot});
},
},
@@ -134,6 +137,10 @@ export class Lightclient {
);
}
get status(): RunStatusCode {
return this.runStatus.code;
}
// Embed lightweight clock. The epoch cycles are handled with `this.runLoop()`
get currentSlot(): number {
return getCurrentSlot(this.config, this.genesisTime);
@@ -167,16 +174,20 @@ export class Lightclient {
}
stop(): void {
if (this.status.code !== RunStatusCode.started) return;
if (this.runStatus.code !== RunStatusCode.started) return;
this.status.controller.abort();
this.status = {code: RunStatusCode.stopped};
this.runStatus.controller.abort();
this.updateRunStatus({code: RunStatusCode.stopped});
}
getHead(): allForks.LightClientHeader {
return this.lightclientSpec.store.optimisticHeader;
}
getFinalized(): allForks.LightClientHeader {
return this.lightclientSpec.store.finalizedHeader;
}
async sync(fromPeriod: SyncPeriod, toPeriod: SyncPeriod): Promise<void> {
// Initialize the BLS implementation. This may requires initializing the WebAssembly instance
// so why it's a an async process. This should be initialized once before any bls operations.
@@ -212,12 +223,12 @@ export class Lightclient {
// Check if we have a sync committee for the current clock period
if (!this.lightclientSpec.store.syncCommittees.has(currentPeriod)) {
// Stop head tracking
if (this.status.code === RunStatusCode.started) {
this.status.controller.abort();
if (this.runStatus.code === RunStatusCode.started) {
this.runStatus.controller.abort();
}
// Go into sync mode
this.status = {code: RunStatusCode.syncing};
this.updateRunStatus({code: RunStatusCode.syncing});
const headPeriod = computeSyncPeriodAtSlot(this.getHead().beacon.slot);
this.logger.debug("Syncing", {lastPeriod: headPeriod, currentPeriod});
@@ -243,9 +254,9 @@ export class Lightclient {
}
// After successfully syncing, track head if not already
if (this.status.code !== RunStatusCode.started) {
if (this.runStatus.code !== RunStatusCode.started) {
const controller = new AbortController();
this.status = {code: RunStatusCode.started, controller};
this.updateRunStatus({code: RunStatusCode.started, controller});
this.logger.debug("Started tracking the head");
this.transport.onOptimisticUpdate(this.processOptimisticUpdate.bind(this));
@@ -268,11 +279,11 @@ export class Lightclient {
}
// Wait for the next epoch
if (this.status.code !== RunStatusCode.started) {
if (this.runStatus.code !== RunStatusCode.started) {
return;
} else {
try {
await sleep(timeUntilNextEpoch(this.config, this.genesisTime), this.status.controller.signal);
await sleep(timeUntilNextEpoch(this.config, this.genesisTime), this.runStatus.controller.signal);
} catch (e) {
if (isErrorAborted(e)) {
return;
@@ -306,4 +317,9 @@ export class Lightclient {
private currentSlotWithTolerance(): Slot {
return slotWithFutureTolerance(this.config, this.genesisTime, MAX_CLOCK_DISPARITY_SEC);
}
private updateRunStatus(runStatus: RunStatus): void {
this.runStatus = runStatus;
this.emitter.emit(LightclientEvent.statusChange, this.runStatus.code);
}
}

View File

@@ -106,7 +106,7 @@ describe("sync", () => {
// Sync periods to current
await new Promise<void>((resolve) => {
lightclient.emitter.on(LightclientEvent.lightClientFinalityUpdate, (header) => {
lightclient.emitter.on(LightclientEvent.lightClientFinalityHeader, (header) => {
if (computeSyncPeriodAtSlot(header.beacon.slot) >= targetPeriod) {
resolve();
}
@@ -128,7 +128,7 @@ describe("sync", () => {
// Track head + reference states with some known data
const syncCommittee = getInteropSyncCommittee(targetPeriod);
await new Promise<void>((resolve) => {
lightclient.emitter.on(LightclientEvent.lightClientOptimisticUpdate, (header) => {
lightclient.emitter.on(LightclientEvent.lightClientOptimisticHeader, (header) => {
if (header.beacon.slot === targetSlot) {
resolve();
}

View File

@@ -13,6 +13,7 @@ export {
ForkExecution,
ForkBlobs,
isForkExecution,
isForkWithdrawals,
isForkBlobs,
isForkLightClient,
} from "./forkName.js";

10
packages/prover/.gitignore vendored Normal file
View File

@@ -0,0 +1,10 @@
node_modules/
lib
.nyc_output/
coverage/**
.DS_Store
*.swp
.idea
yarn-error.log
package-lock.json
dist*

View File

@@ -0,0 +1,8 @@
colors: true
timeout: 2000
exit: true
extension: ["ts"]
require:
- ./test/setup.ts
node-option:
- "loader=ts-node/esm"

View File

@@ -0,0 +1,3 @@
{
"extends": "../../.nycrc.json"
}

201
packages/prover/LICENSE Normal file
View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

68
packages/prover/README.md Normal file
View File

@@ -0,0 +1,68 @@
# Lodestar Eth Consensus Lightclient Prover
[![Discord](https://img.shields.io/discord/593655374469660673.svg?label=Discord&logo=discord)](https://discord.gg/aMxzVcr)
[![ETH Beacon APIs Spec v2.1.0](https://img.shields.io/badge/ETH%20beacon--APIs-2.1.0-blue)](https://github.com/ethereum/beacon-APIs/releases/tag/v2.1.0)
![ES Version](https://img.shields.io/badge/ES-2020-yellow)
![Node Version](https://img.shields.io/badge/node-18.x-green)
> This package is part of [ChainSafe's Lodestar](https://lodestar.chainsafe.io) project
A TypeScript prover that verifies Ethereum execution JSON-RPC responses against the consensus light client (see the [Ethereum Consensus API spec](https://github.com/ethereum/beacon-apis))
## Usage
You can use `@lodestar/prover` in two ways: as a Web3 provider or as a proxy. See the example below for the provider use case.
```ts
import Web3 from "web3";
import {createVerifiedExecutionProvider, LCTransport} from "@lodestar/prover";
const {provider, proofProvider} = createVerifiedExecutionProvider(
new Web3.providers.HttpProvider("https://lodestar-sepoliarpc.chainsafe.io"), {
transport: LCTransport.Rest,
urls: ["https://lodestar-sepolia.chainsafe.io"],
network: "sepolia",
wsCheckpoint: "trusted-checkpoint"
});
const web3 = new Web3(provider);
const address = "0xf97e180c050e5Ab072211Ad2C213Eb5AEE4DF134";
const balance = await web3.eth.getBalance(address, "latest");
console.log({balance, address});
```
You can also invoke the package as binary.
```bash
npm i -g @lodestar/prover
lodestar-prover start \
--network sepolia \
--execution-rpc https://lodestar-sepoliarpc.chainsafe.io \
--mode rest \
--beacon-rpc https://lodestar-sepolia.chainsafe.io \
--port 8080
```
## Prerequisites
- [Lerna](https://github.com/lerna/lerna)
- [Yarn](https://yarnpkg.com/)
## What you need
You will need to go over the [specification](https://github.com/ethereum/beacon-apis). You will also need to have a [basic understanding of lightclient](https://github.com/ethereum/consensus-specs/blob/dev/specs/altair/light-client/light-client.md).
## Getting started
- Follow the [installation guide](https://chainsafe.github.io/lodestar/) to install Lodestar.
- Quickly try out the whole stack by [starting a local testnet](https://chainsafe.github.io/lodestar/usage/local).
## Contributors
Read our [contributors document](/CONTRIBUTING.md), [submit an issue](https://github.com/ChainSafe/lodestar/issues/new/choose) or talk to us on our [discord](https://discord.gg/yjyvFRP)!
## License
Apache-2.0 [ChainSafe Systems](https://chainsafe.io)

View File

@@ -0,0 +1,3 @@
#!/usr/bin/env node
await import("../lib/cli/index.js");

View File

@@ -0,0 +1,9 @@
// Karma configuration for browser tests: reuses the repo-wide base karma
// config and plugs in this package's webpack test configuration.
const karmaConfig = require("../../karma.base.config.js");
const webpackConfig = require("./webpack.test.config.cjs");

module.exports = function karmaConfigurator(config) {
  config.set({
    // Spread the shared base settings, then override the webpack config
    ...karmaConfig,
    webpack: webpackConfig,
  });
};

View File

@@ -0,0 +1,89 @@
{
"name": "@lodestar/prover",
"description": "A Typescript implementation of the Ethereum Consensus light client",
"license": "Apache-2.0",
"author": "ChainSafe Systems",
"homepage": "https://github.com/ChainSafe/lodestar#readme",
"repository": {
"type": "git",
"url": "git+https://github.com:ChainSafe/lodestar.git"
},
"bugs": {
"url": "https://github.com/ChainSafe/lodestar/issues"
},
"version": "1.4.3",
"type": "module",
"exports": {
".": {
"import": "./lib/index.js",
"browser": "./lib/index.web.js"
}
},
"bin": {
"lodestar-prover": "lib/cli/index.js"
},
"typesVersions": {
"*": {
"*": [
"*",
"lib/*",
"lib/*/index"
]
}
},
"types": "./lib/index.d.ts",
"files": [
"lib/**/*.d.ts",
"lib/**/*.js",
"lib/**/*.js.map",
"*.d.ts",
"*.js"
],
"scripts": {
"clean": "rm -rf lib && rm -f *.tsbuildinfo",
"build": "tsc -p tsconfig.build.json",
"build:release": "yarn clean && yarn run build",
"check-build": "node -e \"(async function() { await import('./lib/index.js') })()\"",
"check-types": "tsc",
"coverage": "codecov -F lodestar-api",
"lint": "eslint --color --ext .ts src/ test/",
"lint:fix": "yarn run lint --fix",
"pretest": "yarn run check-types",
"test": "yarn test:unit && yarn test:e2e",
"test:unit": "nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'",
"test:browsers": "yarn karma start karma.config.cjs",
"test:e2e": "mocha 'test/e2e/**/*.test.ts'",
"check-readme": "typescript-docs-verifier"
},
"dependencies": {
"@lodestar/api": "^1.5.0",
"@lodestar/types": "^1.5.0",
"@lodestar/utils": "^1.5.0",
"@lodestar/config": "^1.5.0",
"@lodestar/light-client": "^1.5.0",
"@ethereumjs/trie": "^5.0.4",
"@ethereumjs/util": "^8.0.5",
"@ethereumjs/rlp": "^4.0.1",
"ethereum-cryptography": "^1.2.0",
"http-proxy": "^1.18.1",
"yargs": "^16.1.0",
"source-map-support": "^0.5.19",
"find-up": "^5.0.0"
},
"devDependencies": {
"@types/http-proxy": "^1.17.10",
"@types/yargs": "^15.0.9",
"web3": "^1.8.2",
"ethers": "^6.1.0"
},
"peerDependencies": {
},
"keywords": [
"ethereum",
"eth-consensus",
"beacon",
"api",
"blockchain",
"prover"
]
}

View File

@@ -0,0 +1,54 @@
// Must not use `* as yargs`, see https://github.com/yargs/yargs/issues/1131
import yargs from "yargs";
// @ts-expect-error no type
import {hideBin} from "yargs/helpers";
import {registerCommandToYargs} from "../utils/command.js";
import {getVersionData} from "../utils/version.js";
import {cmds} from "./cmds/index.js";
const {version} = getVersionData();
const topBanner = `🌟 Lodestar Prover Proxy: Ethereum RPC proxy for RPC responses, verified against the trusted block hashes.
* Version: ${version}
* by ChainSafe Systems, 2018-2022`;
const bottomBanner = `📖 For more information, check the CLI reference:
* https://chainsafe.github.io/lodestar/reference/cli
✍️ Give feedback and report issues on GitHub:
* https://github.com/ChainSafe/lodestar`;
export const yarg = yargs((hideBin as (args: string[]) => string[])(process.argv));
/**
 * Common factory for running the CLI and running integration tests.
 * The CLI must actually be executed in a different script.
 *
 * Builds the yargs instance with banners, aliases and all registered
 * commands, and enables strict mode so unrecognized commands/options error.
 *
 * @returns the configured yargs CLI, not yet parsed.
 */
export function getLodestarProverCli(): yargs.Argv {
  const prover = yarg
    .env("LODESTAR")
    .parserConfiguration({
      // As of yargs v16.1.0 dot-notation breaks strictOptions()
      // Manually processing options is typesafe tho more verbose
      "dot-notation": false,
    })
    // blank scriptName so that help text doesn't display the cli name before each command
    .scriptName("")
    .demandCommand(1)
    // Control show help behaviour below on .fail()
    .showHelpOnFail(false)
    .usage(topBanner)
    .epilogue(bottomBanner)
    .version(topBanner)
    .alias("h", "help")
    .alias("v", "version");

  // yargs.command and all ./cmds
  for (const cmd of cmds) {
    registerCommandToYargs(prover, cmd);
  }

  // Suggest near-miss commands, and throw an error on unrecognized cmds.
  // Fix: the original chained `.recommendCommands()` above AND called it
  // again here — the duplicate chained call has been removed.
  prover.recommendCommands().strict();

  return prover;
}

View File

@@ -0,0 +1,3 @@
import {proverProxyStartCommand} from "./start/index.js";
/** All top-level commands exposed by the prover CLI binary. */
export const cmds = [proverProxyStartCommand];

View File

@@ -0,0 +1,27 @@
import {LCTransport} from "../../../interfaces.js";
import {createVerifiedExecutionProxy, VerifiedProxyOptions} from "../../../web3_proxy.js";
import {stdLogger} from "../../../utils/logger.js";
import {parseStartArgs, StartArgs} from "./options.js";
/**
 * Handler for the `start` command: builds the verified-execution proxy,
 * starts its HTTP server on the configured port, and waits until the
 * proof provider has finished initializing.
 * (The previous comment, "Runs a beacon node.", was a copy-paste from
 * another package and did not describe this function.)
 */
export async function proverProxyStartHandler(args: StartArgs): Promise<void> {
  // Normalize raw CLI flags into structured options
  const opts = parseStartArgs(args);
  const {network, executionRpcUrl, port, wsCheckpoint} = opts;
  const options: VerifiedProxyOptions = {
    logger: stdLogger,
    network,
    executionRpcUrl,
    wsCheckpoint,
    // Spread the correct union arm depending on the light-client transport
    ...(opts.transport === LCTransport.Rest
      ? {transport: LCTransport.Rest, urls: opts.urls}
      : {transport: LCTransport.P2P, bootnodes: opts.bootnodes}),
  };
  const {server, proofProvider} = createVerifiedExecutionProxy(options);
  server.listen(port);
  await proofProvider.waitToBeReady();
}

View File

@@ -0,0 +1,17 @@
import {CliCommand, CliCommandOptions} from "../../../utils/command.js";
import {proverProxyStartHandler} from "./handler.js";
import {StartArgs, startOptions} from "./options.js";
/** Yargs command definition for `lodestar-prover start`. */
export const proverProxyStartCommand: CliCommand<StartArgs> = {
  command: "start",
  describe: "Start proxy server",
  examples: [
    {
      command:
        "start --network sepolia --execution-rpc https://lodestar-sepoliarpc.chainsafe.io --mode rest --beacon-rpc https://lodestar-sepolia.chainsafe.io",
      description: "Start a proxy server and connect to the sepolia testnet",
    },
  ],
  options: startOptions as CliCommandOptions<StartArgs>,
  handler: proverProxyStartHandler,
};

View File

@@ -0,0 +1,78 @@
import {NetworkName, networksChainConfig} from "@lodestar/config/networks";
import {LCTransport} from "../../../interfaces.js";
import {CliCommandOptions} from "../../../utils/command.js";
/** Raw CLI arguments for the `start` command, keyed by flag name. */
export type StartArgs = {
  port: number;
  network: string;
  "execution-rpc-url": string;
  transport: "rest" | "p2p";
  "beacon-urls"?: string[];
  "beacon-bootnodes"?: string[];
  "ws-checkpoint"?: string;
};

/**
 * Structured start options produced by `parseStartArgs`. The
 * transport-specific part is a discriminated union: REST carries beacon
 * `urls`, P2P carries `bootnodes`.
 */
export type StartOptions = {
  network: NetworkName;
  executionRpcUrl: string;
  port: number;
  wsCheckpoint?: string;
} & ({transport: LCTransport.Rest; urls: string[]} | {transport: LCTransport.P2P; bootnodes: string[]});
/** Yargs option definitions for the `start` command (one entry per {@link StartArgs} flag). */
export const startOptions: CliCommandOptions<StartArgs> = {
  port: {
    description: "Port number to start the proxy.",
    type: "number",
    default: 8080,
  },
  network: {
    description: "Specify the network to connect.",
    type: "string",
    choices: Object.keys(networksChainConfig),
  },
  "execution-rpc-url": {
    description: "RPC url for the execution node.",
    type: "string",
  },
  transport: {
    description: "The Light client mode to connect to. 'rest', 'p2p'",
    type: "string",
    choices: ["rest", "p2p"],
  },
  "beacon-urls": {
    // Fix: "PRC" -> "RPC" typo in user-facing help text
    description: "The beacon node RPC urls for 'rest' mode.",
    type: "string",
    array: true,
    demandOption: false,
  },
  "beacon-bootnodes": {
    // Fix: "PRC" -> "RPC" typo in user-facing help text
    description: "The beacon node RPC urls for 'p2p' mode.",
    type: "string",
    array: true,
    demandOption: false,
  },
  "ws-checkpoint": {
    description:
      "The trusted checkpoint root to start the lightclient. If not provided will initialize from the latest finalized slot. It shouldn't be older than weak subjectivity period",
    type: "string",
  },
};
/**
 * Convert raw `start` CLI args into the structured {@link StartOptions}
 * consumed by the handler. The url/bootnode list for the inactive
 * transport defaults to an empty array.
 */
export function parseStartArgs(args: StartArgs): StartOptions {
  const isRest = args["transport"] === "rest";
  const isP2p = args["transport"] === "p2p";

  // Remove undefined values to allow deepmerge to inject default values downstream
  return {
    port: args["port"],
    network: args["network"] as NetworkName,
    executionRpcUrl: args["execution-rpc-url"],
    transport: isP2p ? LCTransport.P2P : LCTransport.Rest,
    urls: isRest ? args["beacon-urls"] ?? [] : [],
    bootnodes: isP2p ? args["beacon-bootnodes"] ?? [] : [],
    wsCheckpoint: args["ws-checkpoint"],
  };
}

View File

@@ -0,0 +1,30 @@
#!/usr/bin/env node
// MUST import first to apply preset from args
import {YargsError} from "../utils/errors.js";
import {getLodestarProverCli, yarg} from "./cli.js";
import "source-map-support/register.js";
// Build the CLI, install a custom failure handler, then parse process.argv.
const prover = getLodestarProverCli();

prover
  .fail((msg, err) => {
    if (msg) {
      // Show command help message when no command is provided
      if (msg.includes("Not enough non-option arguments")) {
        yarg.showHelp();
        // eslint-disable-next-line no-console
        console.log("\n");
      }
    }

    // Prefer the thrown error: YargsError messages are user-facing, any
    // other error gets its stack printed for debugging.
    const errorMessage =
      err !== undefined ? (err instanceof YargsError ? err.message : err.stack) : msg || "Unknown error";

    // eslint-disable-next-line no-console
    console.error(`${errorMessage}\n`);
    process.exit(1);
  })
  // Execute CLI
  .parse();

View File

@@ -0,0 +1,4 @@
// https://github.com/ethereum/consensus-specs/blob/dev/specs/altair/light-client/p2p-interface.md#configuration
export const MAX_REQUEST_LIGHT_CLIENT_UPDATES = 128;
// Number of recent execution payloads retained by the payload store
// (presumably bounds memory for unfinalized history — TODO confirm against payload store usage)
export const MAX_PAYLOAD_HISTORY = 32;
// JSON-RPC error code emitted for responses that could not be verified
// (name-based; confirm against the proxy's error responses)
export const UNVERIFIED_RESPONSE_CODE = -33091;

View File

@@ -0,0 +1,3 @@
// Node entrypoint: public interfaces plus both the verified provider and proxy factories.
export * from "./interfaces.js";
export {createVerifiedExecutionProvider} from "./web3_provider.js";
export {createVerifiedExecutionProxy} from "./web3_proxy.js";

View File

@@ -0,0 +1,2 @@
// Browser entrypoint: same as index.ts but omits the proxy export
// (this file is what package.json's "browser" condition resolves to).
export * from "./interfaces.js";
export {createVerifiedExecutionProvider} from "./web3_provider.js";

View File

@@ -0,0 +1,51 @@
import {ChainForkConfig} from "@lodestar/config";
import {NetworkName} from "@lodestar/config/networks";
import {ProofProvider} from "./proof_provider/proof_provider.js";
import {ELRequestPayload, ELResponse} from "./types.js";
/** Transport the light client uses to follow the beacon chain. */
export enum LCTransport {
  Rest = "Rest",
  P2P = "P2P",
}

/**
 * Initialization options for the root/proof provider. The transport part is
 * a discriminated union: REST requires beacon `urls`, P2P requires `bootnodes`.
 */
export type RootProviderInitOptions = {
  network: NetworkName;
  signal: AbortSignal;
  config?: ChainForkConfig;
  wsCheckpoint?: string;
} & ({transport: LCTransport.Rest; urls: string[]} | {transport: LCTransport.P2P; bootnodes: string[]});

/** Forwards a JSON-RPC payload to the underlying execution provider. */
export type ELRequestMethod = (payload: ELRequestPayload) => Promise<ELResponse | undefined>;

// Modern providers uses this structure e.g. Web3 4.x
export interface EIP1193Provider {
  request: (payload: ELRequestPayload) => Promise<ELResponse>;
}

// Some providers uses `request` instead of the `send`. e.g. Ganache
export interface RequestProvider {
  request(payload: ELRequestPayload, callback: (err: Error | undefined, response: ELResponse) => void): void;
}

// The legacy Web3 1.x use this structure
export interface SendProvider {
  send(payload: ELRequestPayload, callback: (err?: Error | null, response?: ELResponse) => void): void;
}

// Ethers provider uses this structure
export interface EthersProvider {
  send(method: string, params: Array<unknown>): Promise<ELResponse>;
}

// Some legacy providers use this very old structure
export interface SendAsyncProvider {
  sendAsync(payload: ELRequestPayload): Promise<ELResponse>;
}

/** Union of all provider shapes the prover knows how to wrap. */
export type Web3Provider = SendProvider | EthersProvider | SendAsyncProvider | RequestProvider | EIP1193Provider;

/**
 * A per-RPC-method handler that produces a verified response, given the raw
 * payload, a pass-through handler to the EL, and the proof provider.
 */
export type ELVerifiedRequestHandler<A = unknown, R = unknown> = (opts: {
  payload: ELRequestPayload<A>;
  handler: ELRequestMethod;
  rootProvider: ProofProvider;
}) => Promise<ELResponse<R>>;

View File

@@ -0,0 +1,25 @@
/**
 * A Map keyed by number that additionally tracks the smallest and largest
 * key ever inserted, so callers can cheaply query the covered key range.
 *
 * NOTE: `delete` does not shrink the tracked range — min/max reflect keys
 * inserted at some point, not necessarily keys still present.
 */
export class OrderedMap<T> extends Map<number, T> {
  // Undefined until the first insertion; the getters fall back to 0 to
  // preserve the historical "never written" behavior.
  private _min?: number;
  private _max?: number;

  /** Smallest key ever inserted (0 if the map has never been written). */
  get min(): number {
    return this._min ?? 0;
  }

  /** Largest key ever inserted (0 if the map has never been written). */
  get max(): number {
    return this._max ?? 0;
  }

  set(key: number, value: T): this {
    // Bug fix: min/max were previously initialized to 0, so inserting only
    // keys > 0 left `min` stuck at 0 even though no such key ever existed.
    if (this._min === undefined || key < this._min) {
      this._min = key;
    }
    if (this._max === undefined || key > this._max) {
      this._max = key;
    }
    super.set(key, value);
    return this;
  }
}

View File

@@ -0,0 +1,184 @@
import {Api} from "@lodestar/api";
import {allForks, capella} from "@lodestar/types";
import {MAX_PAYLOAD_HISTORY} from "../constants.js";
import {getExecutionPayloadForBlockNumber, getExecutionPayloads} from "../utils/consensus.js";
import {bufferToHex, hexToNumber} from "../utils/conversion.js";
import {OrderedMap} from "./ordered_map.js";
type BlockELRoot = string;
type BlockCLRoot = string;
/**
* The in-memory store for the execution payloads to be used to verify the proofs
*/
/**
 * The in-memory store for the execution payloads to be used to verify proofs.
 *
 * - Finalized payloads are indexed by execution block number via
 *   `finalizedRoots` (finalized blocks cannot re-org).
 * - Unfinalized payloads are indexed by beacon state root via
 *   `unfinalizedRoots`, so duplicates and re-orgs can be detected.
 * - Payload bodies live in `payloads`, keyed by execution block hash.
 */
export class PayloadStore {
  // We store the block numbers only for finalized blocks
  private finalizedRoots = new OrderedMap<BlockELRoot>();

  // Unfinalized blocks are stored by the roots of the beacon chain
  private unfinalizedRoots = new Map<BlockCLRoot, BlockELRoot>();

  // Payloads store with BlockELRoot as key
  private payloads = new Map<BlockELRoot, allForks.ExecutionPayload>();

  // EL block hash of the highest-block-number payload seen so far
  private latestBlockRoot: BlockELRoot | null = null;

  constructor(private opts: {api: Api}) {}

  /** Highest finalized payload known to the store, if any. */
  get finalized(): allForks.ExecutionPayload | undefined {
    const finalizedMaxRoot = this.finalizedRoots.get(this.finalizedRoots.max);
    if (finalizedMaxRoot) {
      return this.payloads.get(finalizedMaxRoot);
    }
    return undefined;
  }

  /** Payload with the highest block number seen so far, if any. */
  get latest(): allForks.ExecutionPayload | undefined {
    if (this.latestBlockRoot) {
      return this.payloads.get(this.latestBlockRoot);
    }
    return undefined;
  }

  /**
   * Looks up a payload by block hash (0x + 64 hex chars) or block number
   * (hex string, decimal string, or number). Block-number lookups only work
   * for finalized blocks and may trigger a fetch from the beacon API.
   */
  async get(blockId: number | string): Promise<allForks.ExecutionPayload | undefined> {
    // Given block id is a block hash in hex (32 bytes root takes 64 hex chars + 2 for 0x prefix)
    if (typeof blockId === "string" && blockId.startsWith("0x") && blockId.length === 64 + 2) {
      return this.payloads.get(blockId);
    }

    // Given block id is a block number in hex
    if (typeof blockId === "string" && blockId.startsWith("0x")) {
      return this.getOrFetchFinalizedPayload(hexToNumber(blockId));
    }

    // Given block id is a block number in decimal string
    if (typeof blockId === "string" && !blockId.startsWith("0x")) {
      return this.getOrFetchFinalizedPayload(parseInt(blockId, 10));
    }

    // Given block id is a block number in decimal
    if (typeof blockId === "number") {
      return this.getOrFetchFinalizedPayload(blockId);
    }

    return undefined;
  }

  /**
   * Returns the finalized payload for `blockNumber`, fetching (and caching)
   * it from the beacon API when not already present.
   * @throws when blockNumber is beyond the latest finalized block
   */
  async getOrFetchFinalizedPayload(blockNumber: number): Promise<allForks.ExecutionPayload | undefined> {
    if (blockNumber > this.finalizedRoots.max) {
      throw new Error(
        `Block number ${blockNumber} is higher than the latest finalized block number. We recommend to use block hash for unfinalized blocks.`
      );
    }

    let blockELRoot = this.finalizedRoots.get(blockNumber);
    // check if we have payload cached locally else fetch from api
    if (!blockELRoot) {
      // NOTE(review): `finalizedRoots.min` is an execution block number but
      // is passed as the `startSlot` argument of
      // getExecutionPayloadForBlockNumber — confirm the slot/number
      // conflation is intended.
      const payloads = await getExecutionPayloadForBlockNumber(this.opts.api, this.finalizedRoots.min, blockNumber);
      for (const payload of Object.values(payloads)) {
        this.set(payload, true);
      }
    }

    blockELRoot = this.finalizedRoots.get(blockNumber);
    if (blockELRoot) {
      return this.payloads.get(blockELRoot);
    }

    return undefined;
  }

  /** Stores a payload, updating the `latest` pointer and finalized index. */
  set(payload: allForks.ExecutionPayload, finalized: boolean): void {
    const blockRoot = bufferToHex(payload.blockHash);
    this.payloads.set(blockRoot, payload);

    // Bug fix: `latestBlockRoot` starts as null and was previously only
    // updated when already non-null, so the `latest` getter never returned
    // anything. Initialize it on the first insert.
    if (this.latestBlockRoot === null) {
      this.latestBlockRoot = blockRoot;
    } else {
      const latestPayload = this.payloads.get(this.latestBlockRoot);
      if (latestPayload && latestPayload.blockNumber < payload.blockNumber) {
        this.latestBlockRoot = blockRoot;
      }
    }

    if (finalized) {
      this.finalizedRoots.set(payload.blockNumber, blockRoot);
    }
  }

  /**
   * Processes a (capella) light client header, fetching its execution
   * payload when missing and updating finalized/unfinalized indices.
   */
  async processLCHeader(header: capella.LightClientHeader, finalized = false): Promise<void> {
    const blockSlot = header.beacon.slot;
    const blockNumber = header.execution.blockNumber;
    const blockELRoot = bufferToHex(header.execution.blockHash);
    const blockCLRoot = bufferToHex(header.beacon.stateRoot);
    const existingELRoot = this.unfinalizedRoots.get(blockCLRoot);

    // ==== Finalized blocks ====
    // if the block is finalized, we need to update the finalizedRoots map
    if (finalized) {
      this.finalizedRoots.set(blockNumber, blockELRoot);

      // If the block is finalized and we already have the payload
      // We can remove it from the unfinalizedRoots map and do nothing else
      if (existingELRoot) {
        this.unfinalizedRoots.delete(blockCLRoot);
      }
      // If the block is finalized and we do not have the payload
      // We need to fetch and set the payload
      else {
        this.payloads.set(
          bufferToHex(header.execution.blockHash),
          (await getExecutionPayloads(this.opts.api, blockSlot, blockSlot))[blockSlot]
        );
      }

      return;
    }

    // ==== Unfinalized blocks ====
    // We already have the payload for this block
    if (existingELRoot && existingELRoot === blockELRoot) {
      return;
    }

    // Re-org happened, we need to evict the stale payload
    if (existingELRoot && existingELRoot !== blockELRoot) {
      this.payloads.delete(existingELRoot);
    }

    // Bug fix: record/refresh the CL→EL mapping for every unfinalized
    // header. Previously the mapping was only written on the re-org branch,
    // so `unfinalizedRoots` was never populated and duplicate headers /
    // re-orgs could never be detected above.
    this.unfinalizedRoots.set(blockCLRoot, blockELRoot);

    // We do not have the payload for this block, we need to fetch it
    const payload = (await getExecutionPayloads(this.opts.api, blockSlot, blockSlot))[blockSlot];
    this.set(payload, false);
    this.prune();
  }

  /**
   * Caps memory usage: drops finalized payloads older than
   * MAX_PAYLOAD_HISTORY behind the newest, and unfinalized entries whose
   * payload is gone or fell behind the finalized window.
   */
  private prune(): void {
    if (this.finalizedRoots.size <= MAX_PAYLOAD_HISTORY) return;

    for (
      let blockNumber = this.finalizedRoots.max - MAX_PAYLOAD_HISTORY;
      blockNumber > this.finalizedRoots.min;
      blockNumber--
    ) {
      const blockELRoot = this.finalizedRoots.get(blockNumber);
      if (blockELRoot) {
        this.payloads.delete(blockELRoot);
        this.finalizedRoots.delete(blockNumber);
      }
    }

    for (const [clRoot, elRoot] of this.unfinalizedRoots) {
      const payload = this.payloads.get(elRoot);
      if (!payload) {
        this.unfinalizedRoots.delete(clRoot);
        continue;
      }
      if (payload.blockNumber < this.finalizedRoots.min) {
        this.unfinalizedRoots.delete(clRoot);
      }
    }
  }
}

View File

@@ -0,0 +1,189 @@
import {Api, getClient} from "@lodestar/api/beacon";
import {ChainForkConfig, createChainForkConfig} from "@lodestar/config";
import {networksChainConfig} from "@lodestar/config/networks";
import {Lightclient, LightclientEvent, RunStatusCode} from "@lodestar/light-client";
import {LightClientRestTransport} from "@lodestar/light-client/transport";
import {isForkWithdrawals} from "@lodestar/params";
import {allForks, capella} from "@lodestar/types";
import {LCTransport, RootProviderInitOptions} from "../interfaces.js";
import {assertLightClient} from "../utils/assertion.js";
import {
getExecutionPayloads,
getGenesisData,
getSyncCheckpoint,
getUnFinalizedRangeForPayloads,
} from "../utils/consensus.js";
import {PayloadStore} from "./payload_store.js";
/** ProofProvider options after transport, api and config have been resolved */
type RootProviderOptions = Omit<RootProviderInitOptions, "transport"> & {
  transport: LightClientRestTransport;
  api: Api;
  config: ChainForkConfig;
};

/**
 * Drives a light client and keeps a PayloadStore of proven execution
 * payloads so EL JSON-RPC responses can be verified.
 *
 * Construct via `ProofProvider.init()`, which starts syncing in the
 * background; callers should `await waitToBeReady()` before using it.
 */
export class ProofProvider {
  private store: PayloadStore;

  // Make sure readyPromise doesn't throw unhandled exceptions
  private readyPromise?: Promise<void>;

  lightClient?: Lightclient;

  constructor(private opts: RootProviderOptions) {
    this.store = new PayloadStore({api: opts.api});
  }

  /** Resolves when background sync has completed; rejects if it failed. */
  async waitToBeReady(): Promise<void> {
    return this.readyPromise;
  }

  /**
   * Creates the provider and kicks off sync in the background.
   * Only REST transport is supported; P2P throws.
   */
  static init(opts: RootProviderInitOptions): ProofProvider {
    if (opts.transport === LCTransport.P2P) {
      throw new Error("P2P mode not supported yet");
    }

    const config = createChainForkConfig(networksChainConfig[opts.network]);
    const api = getClient({urls: opts.urls}, {config});
    const transport = new LightClientRestTransport(api);

    const provider = new ProofProvider({
      ...opts,
      config,
      api,
      transport,
    });

    provider.readyPromise = provider.sync(opts.wsCheckpoint).catch((e) => {
      // TODO: will be replaced by logger in the next PR.
      // eslint-disable-next-line no-console
      console.error("Error while syncing", e);
      // NOTE(review): rejects with a plain string, not an Error — callers of
      // waitToBeReady() receive a non-Error rejection reason; confirm intended.
      return Promise.reject("Error while syncing");
    });

    return provider;
  }

  /**
   * Initializes the light client from a checkpoint, waits for it to start,
   * registers header listeners, then backfills recent unfinalized payloads
   * and the finalized payload.
   */
  private async sync(wsCheckpoint?: string): Promise<void> {
    if (this.lightClient !== undefined) {
      throw Error("Light client already initialized and syncing.");
    }

    const {api, config, transport} = this.opts;
    const checkpointRoot = await getSyncCheckpoint(api, wsCheckpoint);
    const genesisData = await getGenesisData(api);

    this.lightClient = await Lightclient.initializeFromCheckpointRoot({
      checkpointRoot,
      config,
      transport,
      genesisData,
    });

    assertLightClient(this.lightClient);

    // Wait for the lightclient to start
    await new Promise<void>((resolve) => {
      const lightClientStarted = (status: RunStatusCode): void => {
        if (status === RunStatusCode.started) {
          // One-shot listener: detach once the client reports "started"
          this.lightClient?.emitter.off(LightclientEvent.statusChange, lightClientStarted);
          resolve();
        }
      };
      this.lightClient?.emitter.on(LightclientEvent.statusChange, lightClientStarted);
      this.lightClient?.start();
    });

    this.registerEvents();

    // Load the payloads from the CL
    const {start, end} = await getUnFinalizedRangeForPayloads(this.lightClient);
    const payloads = await getExecutionPayloads(this.opts.api, start, end);
    for (const payload of Object.values(payloads)) {
      this.store.set(payload, false);
    }

    // Load the finalized payload from the CL
    const finalizedSlot = this.lightClient.getFinalized().beacon.slot;
    const finalizedPayload = await getExecutionPayloads(this.opts.api, finalizedSlot, finalizedSlot);
    this.store.set(finalizedPayload[finalizedSlot], true);
  }

  /** Snapshot of sync progress; zeros when the light client is not set up. */
  getStatus(): {latest: number; finalized: number; status: RunStatusCode} {
    if (!this.lightClient) {
      return {
        latest: 0,
        finalized: 0,
        status: RunStatusCode.uninitialized,
      };
    }

    return {
      latest: this.lightClient.getHead().beacon.slot,
      finalized: this.lightClient.getFinalized().beacon.slot,
      status: this.lightClient.status,
    };
  }

  /**
   * Returns the execution payload for "finalized", "latest", a 0x block
   * hash/number, or a numeric block number.
   * @throws when no payload is available for the given id
   */
  async getExecutionPayload(blockNumber: number | string | "finalized" | "latest"): Promise<allForks.ExecutionPayload> {
    assertLightClient(this.lightClient);

    if (typeof blockNumber === "string" && blockNumber === "finalized") {
      const payload = this.store.finalized;
      if (!payload) throw new Error("No finalized payload");
      return payload;
    }

    if (typeof blockNumber === "string" && blockNumber === "latest") {
      const payload = this.store.latest;
      if (!payload) throw new Error("No latest payload");
      return payload;
    }

    if ((typeof blockNumber === "string" && blockNumber.startsWith("0x")) || typeof blockNumber === "number") {
      const payload = await this.store.get(blockNumber);
      if (!payload) throw new Error(`No payload for blockNumber ${blockNumber}`);
      return payload;
    }

    throw new Error(`Invalid blockNumber "${blockNumber}"`);
  }

  /**
   * Validates and stores the payload referenced by a light client header.
   * Headers from pre-withdrawals forks (no execution payload) are ignored.
   * @throws when a withdrawals-fork header lacks an execution payload
   */
  async processLCHeader(lcHeader: allForks.LightClientHeader, finalized = false): Promise<void> {
    const fork = this.opts.config.getForkName(lcHeader.beacon.slot);

    if (!isForkWithdrawals(fork)) {
      return;
    }

    const sszType = this.opts.config.getExecutionForkTypes(lcHeader.beacon.slot).ExecutionPayloadHeader;
    // Reject headers whose execution part is missing or all-default (empty)
    if (
      isForkWithdrawals(fork) &&
      (!("execution" in lcHeader) || sszType.equals(lcHeader.execution, sszType.defaultValue()))
    ) {
      throw new Error("Execution payload is required for execution fork");
    }

    await this.store.processLCHeader(lcHeader as capella.LightClientHeader, finalized);
  }

  /** Subscribes to light client header events; stops the client on abort. */
  private registerEvents(): void {
    assertLightClient(this.lightClient);

    this.opts.signal.addEventListener("abort", () => {
      this.lightClient?.stop();
    });

    this.lightClient.emitter.on(LightclientEvent.lightClientFinalityHeader, async (data) => {
      await this.processLCHeader(data, true).catch((e) => {
        // Will be replaced with logger in next PR.
        // eslint-disable-next-line no-console
        console.error(e);
      });
    });

    this.lightClient.emitter.on(LightclientEvent.lightClientOptimisticHeader, async (data) => {
      await this.processLCHeader(data).catch((e) => {
        // Will be replaced with logger in next PR.
        // eslint-disable-next-line no-console
        console.error(e);
      });
    });
  }
}

View File

@@ -0,0 +1,35 @@
/** JSON-RPC request envelope sent to the execution layer */
export interface ELRequestPayload<T = unknown[]> {
  readonly jsonrpc: string & ("2.0" | "1.0");
  readonly id: number | string;
  readonly method: string;
  readonly params: T;
  // Extra transport options some provider libraries attach to the request
  readonly requestOptions?: unknown;
}

// Make the very flexible el response type to match different libraries easily
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type ELResponse<T = any, E = any> = {
  readonly id: number | string;
  jsonrpc: string;
  result?: T;
  error?: {
    readonly code?: number;
    readonly data?: E;
    readonly message: string;
  };
};

/** Result of eth_getProof (EIP-1186) for an account */
export interface ELProof {
  readonly address: string;
  readonly balance: string;
  readonly codeHash: string;
  readonly nonce: string;
  readonly storageHash: string;
  readonly accountProof: string[];
  readonly storageProof: {
    readonly key: string;
    readonly value: string;
    readonly proof: string[];
  }[];
}

/** Subset of an EIP-1186 proof needed to verify storage slots only */
export type ELStorageProof = Pick<ELProof, "storageHash" | "storageProof">;

/** 0x-prefixed hexadecimal string */
export type HexString = string;

View File

@@ -0,0 +1,53 @@
import {Lightclient} from "@lodestar/light-client";
import {
EIP1193Provider,
EthersProvider,
RequestProvider,
SendAsyncProvider,
SendProvider,
Web3Provider,
} from "../interfaces.js";
/**
 * Narrows an optional Lightclient to a definite instance.
 * @throws when the light client has not been initialized yet
 */
export function assertLightClient(client?: Lightclient): asserts client is Lightclient {
  if (!client) throw new Error("Light client is not initialized yet.");
}
/** Legacy Web3 1.x: synchronous-style `send(payload, callback)` */
export function isSendProvider(provider: Web3Provider): provider is SendProvider {
  if (!("send" in provider) || typeof provider.send !== "function") return false;
  // Callback signature (arity > 1) and NOT an async function
  return provider.send.length > 1 && provider.send.constructor.name !== "AsyncFunction";
}

/** Ethers: async `send(method, params)` */
export function isEthersProvider(provider: Web3Provider): provider is EthersProvider {
  if (!("send" in provider) || typeof provider.send !== "function") return false;
  // Same arity as SendProvider, but declared async
  return provider.send.length > 1 && provider.send.constructor.name === "AsyncFunction";
}

/** Ganache-style: callback `request(payload, callback)` */
export function isRequestProvider(provider: Web3Provider): provider is RequestProvider {
  if (!("request" in provider) || typeof provider.request !== "function") return false;
  return provider.request.length > 1;
}

/** Very old providers: async `sendAsync(payload)` */
export function isSendAsyncProvider(provider: Web3Provider): provider is SendAsyncProvider {
  if (!("sendAsync" in provider) || typeof provider.sendAsync !== "function") return false;
  return provider.sendAsync.constructor.name === "AsyncFunction";
}

/** Modern EIP-1193 providers (Web3 4.x): async `request(payload)` */
export function isEIP1193Provider(provider: Web3Provider): provider is EIP1193Provider {
  if (!("request" in provider) || typeof provider.request !== "function") return false;
  return provider.request.constructor.name === "AsyncFunction";
}

View File

@@ -0,0 +1,43 @@
import {Options, Argv} from "yargs";
/** yargs option descriptors for every own argument of a command */
export type CliCommandOptions<OwnArgs> = Required<{[key in keyof OwnArgs]: Options}>;

/** Declarative description of a CLI (sub)command registered with yargs */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export interface CliCommand<OwnArgs = Record<never, never>, ParentArgs = Record<never, never>, R = any> {
  command: string;
  describe: string;
  // Shown via `yargs.example()`; command is prefixed with "$0"
  examples?: {command: string; description: string}[];
  options?: CliCommandOptions<OwnArgs>;
  // 1st arg: any = free own sub command options
  // 2nd arg: subcommand parent options is = to this command options + parent options
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  subcommands?: CliCommand<any, OwnArgs & ParentArgs>[];
  handler?: (args: OwnArgs & ParentArgs) => Promise<R>;
}
/**
* Register a CliCommand type to yargs. Recursively registers subcommands too.
* @param yargs
* @param cliCommand
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function registerCommandToYargs(yargs: Argv, cliCommand: CliCommand<any, any>): void {
yargs.command({
command: cliCommand.command,
describe: cliCommand.describe,
builder: (yargsBuilder) => {
yargsBuilder.options(cliCommand.options || {});
for (const subcommand of cliCommand.subcommands || []) {
registerCommandToYargs(yargsBuilder, subcommand);
}
if (cliCommand.examples) {
for (const example of cliCommand.examples) {
yargsBuilder.example(`$0 ${example.command}`, example.description);
}
}
return yargs;
},
// eslint-disable-next-line @typescript-eslint/no-empty-function
handler: cliCommand.handler || function emptyHandler(): void {},
});
}

View File

@@ -0,0 +1,105 @@
import {Api} from "@lodestar/api/beacon";
import {allForks, Bytes32, capella} from "@lodestar/types";
import {GenesisData, Lightclient} from "@lodestar/light-client";
import {ApiError} from "@lodestar/api";
import {MAX_PAYLOAD_HISTORY} from "../constants.js";
import {hexToBuffer} from "./conversion.js";
/**
 * Fetches the beacon block at `slot`, walking one slot at a time in
 * `direction` past empty slots (404 responses) until a block is found.
 *
 * NOTE: recursion is unbounded — it relies on the API eventually returning
 * a block or a non-404 error.
 */
export async function fetchNearestBlock(
  api: Api,
  slot: number,
  direction: "up" | "down" = "down"
): Promise<capella.SignedBeaconBlock> {
  const res = await api.beacon.getBlockV2(slot);

  if (res.ok) return res.response.data;

  if (!res.ok && res.error.code === 404) {
    // Bug fix: propagate `direction` — previously the recursive call fell
    // back to the default "down", so an "up" search reversed direction
    // after the first empty slot.
    return fetchNearestBlock(api, direction === "down" ? slot - 1 : slot + 1, direction);
  }

  throw new Error(`Can not fetch nearest block for slot=${slot}`);
}
/**
 * Computes the slot range of unfinalized payloads to backfill: at most
 * MAX_PAYLOAD_HISTORY slots behind head, clamped so it never reaches past
 * the finalized slot.
 */
export async function getUnFinalizedRangeForPayloads(lightClient: Lightclient): Promise<{start: number; end: number}> {
  const headSlot = lightClient.getHead().beacon.slot;
  const finalizedSlot = lightClient.getFinalized().beacon.slot;
  return {
    start: headSlot,
    end: Math.max(headSlot - MAX_PAYLOAD_HISTORY, finalizedSlot),
  };
}
/**
 * Fetches execution payloads for slots in [startSlot, endSlot] (inclusive,
 * in either order), walking backwards from the highest slot. A slot's
 * payload is only recorded when it links to the previous block's payload
 * (parentHash matches the parent's blockHash).
 */
export async function getExecutionPayloads(
  api: Api,
  startSlot: number,
  endSlot: number
): Promise<Record<number, allForks.ExecutionPayload>> {
  [startSlot, endSlot] = [Math.min(startSlot, endSlot), Math.max(startSlot, endSlot)];
  const payloads: Record<number, allForks.ExecutionPayload> = {};

  let slot = endSlot;
  let block = await fetchNearestBlock(api, slot, "down");
  payloads[block.message.slot] = block.message.body.executionPayload;

  while (slot >= startSlot) {
    const previousBlock = await fetchNearestBlock(api, block.message.slot - 1, "down");
    // Bug fix: blockHash/parentHash are Uint8Arrays, so `===` compared
    // object identity and was always false — no payload inside this loop
    // was ever recorded. Compare byte contents instead.
    if (
      Buffer.compare(
        block.message.body.executionPayload.parentHash,
        previousBlock.message.body.executionPayload.blockHash
      ) === 0
    ) {
      payloads[block.message.slot] = block.message.body.executionPayload;
    }
    slot = block.message.slot - 1;
    block = previousBlock;
  }

  return payloads;
}
/**
 * Walks backwards from `startSlot` until a block with execution block
 * number `blockNumber` is found, collecting every execution payload seen
 * on the way, keyed by beacon slot.
 */
export async function getExecutionPayloadForBlockNumber(
  api: Api,
  startSlot: number,
  blockNumber: number
): Promise<Record<number, allForks.ExecutionPayload>> {
  const payloads: Record<number, allForks.ExecutionPayload> = {};

  let block = await fetchNearestBlock(api, startSlot, "down");
  payloads[block.message.slot] = block.message.body.executionPayload;

  // Bug fix: each newly fetched block's payload must be recorded before the
  // loop condition reads it — previously only the first block was stored,
  // so `payloads[block.message.slot]` was undefined after the first
  // iteration and the condition threw a TypeError.
  while (payloads[block.message.slot].blockNumber !== blockNumber) {
    block = await fetchNearestBlock(api, block.message.slot - 1, "down");
    payloads[block.message.slot] = block.message.body.executionPayload;
  }

  return payloads;
}
/** Fetches genesis time and validators root from the beacon node. */
export async function getGenesisData(api: Pick<Api, "beacon">): Promise<GenesisData> {
  const res = await api.beacon.getGenesis();
  ApiError.assert(res);
  const {genesisTime, genesisValidatorsRoot} = res.response.data;
  return {genesisTime: Number(genesisTime), genesisValidatorsRoot};
}
/**
 * Resolves the checkpoint root to sync the light client from: either the
 * user-provided hex root or the node's current finalized checkpoint.
 *
 * @param checkpoint 0x-prefixed hex string of a 32-byte block root
 * @throws when the provided checkpoint does not decode to 32 bytes
 */
export async function getSyncCheckpoint(api: Pick<Api, "beacon">, checkpoint?: string): Promise<Bytes32> {
  if (checkpoint) {
    // Bug fix: the old check compared the hex *string* length to 32, but a
    // 32-byte root serializes to 64 hex chars (+2 for the "0x" prefix), so
    // every valid checkpoint string was rejected. Validate the decoded
    // byte length instead.
    const root = hexToBuffer(checkpoint);
    if (root.byteLength !== 32) {
      throw Error(`Checkpoint root must be 32 bytes long: ${root.byteLength}`);
    }
    return root;
  }

  const res = await api.beacon.getStateFinalityCheckpoints("head");
  ApiError.assert(res);
  return res.response.data.finalized.root;
}

View File

@@ -0,0 +1,21 @@
/** Encodes a number or bigint as a 0x-prefixed hexadecimal string. */
export function numberToHex(n: number | bigint): string {
  return `0x${n.toString(16)}`;
}

/** Parses a hexadecimal string (with or without 0x prefix) to a number. */
export function hexToNumber(n: string): number {
  const digits = n.startsWith("0x") ? n.slice(2) : n;
  return parseInt(digits, 16);
}

/** Encodes raw bytes as a 0x-prefixed hexadecimal string. */
export function bufferToHex(buffer: Buffer | Uint8Array): string {
  return `0x${Buffer.from(buffer).toString("hex")}`;
}
/**
 * Decodes a hexadecimal string (with or without 0x prefix) to a Buffer.
 *
 * Bug fix: `v.replace("0x", "")` removed the first "0x" occurrence
 * anywhere in the string, not only a leading prefix; anchor the pattern.
 */
export function hexToBuffer(v: string): Buffer {
  return Buffer.from(v.replace(/^0x/, ""), "hex");
}
/**
 * Left-pads `v` with zero bytes to exactly `length` bytes.
 *
 * NOTE(review): assumes v.length <= length; a longer input makes
 * Buffer.copy throw on the negative target offset — confirm callers
 * guarantee this.
 */
export function padLeft(v: Uint8Array, length: number): Uint8Array {
  const padded = Buffer.alloc(length);
  Buffer.from(v).copy(padded, length - v.length);
  return padded;
}

View File

@@ -0,0 +1,4 @@
/**
 * Expected error that shouldn't print a stack trace
 * (e.g. CLI usage errors reported to the user as a plain message).
 */
export class YargsError extends Error {}

View File

@@ -0,0 +1,108 @@
import {RLP} from "@ethereumjs/rlp";
import {Trie} from "@ethereumjs/trie";
import {Account} from "@ethereumjs/util";
import {keccak256} from "ethereum-cryptography/keccak.js";
import {Bytes32} from "@lodestar/types";
import {ethGetBalance} from "../verified_requests/eth_getBalance.js";
import {ELRequestPayload, ELResponse, ELProof, ELStorageProof, HexString} from "../types.js";
import {ProofProvider} from "../proof_provider/proof_provider.js";
import {ELRequestMethod, ELVerifiedRequestHandler} from "../interfaces.js";
import {hexToBuffer, padLeft} from "./conversion.js";
// RLP serialization of an empty account — the expected value when a proof
// shows the account does not exist in the state trie (exclusion proof).
const emptyAccountSerialize = new Account().serialize();
// EVM storage keys are 32-byte words
const storageKeyLength = 32;
// Registry of JSON-RPC methods with a proof-verified handler; methods not
// listed here are proxied through unverified (see processAndVerifyRequest).
// eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any
const supportedELRequests: Record<string, ELVerifiedRequestHandler<any, any>> = {eth_getBalance: ethGetBalance};
/**
 * Routes an EL JSON-RPC request through a proof-verified handler when one
 * exists for the method; otherwise forwards it unverified with a warning.
 */
export async function processAndVerifyRequest({
  payload,
  handler,
  proofProvider,
}: {
  payload: ELRequestPayload;
  handler: ELRequestMethod;
  proofProvider: ProofProvider;
}): Promise<ELResponse | undefined> {
  // Block until the light client has synced enough to verify proofs
  await proofProvider.waitToBeReady();

  const verifiedHandler = supportedELRequests[payload.method];
  if (verifiedHandler === undefined) {
    // eslint-disable-next-line no-console
    console.warn(`Request handler for ${payload.method} is not implemented.`);
    return handler(payload);
  }
  return verifiedHandler({payload, handler, rootProvider: proofProvider});
}
/**
 * Requests an EIP-1186 `eth_getProof` from the execution layer for the
 * given address, storage keys and block.
 * @throws when the EL returns no response
 */
export async function getELProof(
  handler: ELRequestMethod,
  args: [address: string, storageKeys: string[], block: number | string]
): Promise<ELProof> {
  // TODO: Find better way to generate random id
  const response = await handler({
    jsonrpc: "2.0",
    method: "eth_getProof",
    params: args,
    id: (Math.random() * 10000).toFixed(0),
  });
  if (!response) {
    throw new Error("Can not find proof for given address.");
  }
  return response.result as ELProof;
}
/**
 * Verifies an EIP-1186 account proof against a state root.
 *
 * Reconstructs the RLP-encoded account from the proof fields and checks it
 * matches the trie node proven at keccak256(address). When the trie lookup
 * yields nothing (exclusion proof), the reconstructed account must equal
 * the empty account for the proof to be valid.
 */
export async function isValidAccount({
  address,
  stateRoot,
  proof,
}: {
  address: HexString;
  stateRoot: Bytes32;
  proof: ELProof;
}): Promise<boolean> {
  const trie = await Trie.create();
  // Accounts are stored in the state trie under keccak256(address)
  const key = keccak256(hexToBuffer(address));
  const expectedAccountRLP = await trie.verifyProof(
    Buffer.from(stateRoot),
    Buffer.from(key),
    proof.accountProof.map(hexToBuffer)
  );
  // Shresth Agrawal (2022) Patronum source code. https://github.com/lightclients/patronum
  const account = Account.fromAccountData({
    nonce: BigInt(proof.nonce),
    balance: BigInt(proof.balance),
    storageRoot: proof.storageHash,
    codeHash: proof.codeHash,
  });
  return account.serialize().equals(expectedAccountRLP ? expectedAccountRLP : emptyAccountSerialize);
}
/**
 * Verifies EIP-1186 storage proofs for the given storage keys against the
 * account's storage root. A missing trie node with a reported value of
 * "0x0" is accepted as a valid exclusion proof.
 *
 * NOTE(review): assumes `proof.storageProof[i]` corresponds to
 * `storageKeys[i]` — confirm callers preserve that ordering.
 */
export async function isValidStorageKeys({
  storageKeys,
  proof,
}: {
  storageKeys: HexString[];
  proof: ELStorageProof;
}): Promise<boolean> {
  const trie = await Trie.create();
  for (let i = 0; i < storageKeys.length; i++) {
    const sp = proof.storageProof[i];
    // Storage slots are trie-keyed by keccak256 of the 32-byte-padded key
    const key = keccak256(padLeft(hexToBuffer(storageKeys[i]), storageKeyLength));
    const expectedStorageRLP = await trie.verifyProof(
      hexToBuffer(proof.storageHash),
      Buffer.from(key),
      sp.proof.map(hexToBuffer)
    );
    // Valid if: exclusion proof with zero value, or inclusion proof whose
    // node matches the RLP encoding of the reported value.
    const isStorageValid =
      (!expectedStorageRLP && sp.value === "0x0") ||
      (!!expectedStorageRLP && expectedStorageRLP.equals(RLP.encode(sp.value)));
    if (!isStorageValid) return false;
  }
  return true;
}

View File

@@ -0,0 +1,49 @@
import path from "node:path";
import fs from "node:fs";
import {fileURLToPath} from "node:url";
// Global variable __dirname no longer available in ES6 modules.
// Solutions: https://stackoverflow.com/questions/46745014/alternative-for-dirname-in-node-js-when-using-es6-modules
// eslint-disable-next-line @typescript-eslint/naming-convention
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Persist git data and distribute through NPM so CLI consumers can know exactly
// at what commit was this src build. This is used in the metrics and to log initially.
//
// - For NPM release (stable): Only the version is persisted. Once must then track the version's tag
// in Github to resolve that version to a specific commit. While this is okay, git-data.json gives
// a gurantee of the exact commit at build time.
//
// - For NPM release (dev): canary commits include the commit, so this feature is not really
// necessary. However, it's more cumbersome to have conditional logic on stable / dev.
//
// - For build from source: .git folder is available in the context of the built code, so it can extract
// branch and commit directly without the need for .git-data.json.
//
// - For build from source dockerized: This feature is required to know the branch and commit, since
// git data is not persisted past the build. However, .dockerignore prevents .git folder from being
// copied into the container's context, so .git-data.json can't be generated.
/**
* WARNING!! If you change this path make sure to update:
* - 'packages/cli/package.json' -> .files -> `".git-data.json"`
*/
export const gitDataPath = path.resolve(__dirname, "../../../.git-data.json");
/** Git data type used to construct version information string and persistence. */
export type GitData = {
/** "developer-feature" */
branch: string;
/** "80c248bb392f512cc115d95059e22239a17bbd7d" */
commit: string;
};
/** Writes a persistent git data file. */
export function writeGitDataFile(gitData: GitData): void {
  // Pretty-print so the file is reviewable in published packages
  fs.writeFileSync(gitDataPath, JSON.stringify(gitData, null, 2));
}
/** Reads the persistent git data file. @throws if missing or not valid JSON */
export function readGitDataFile(): GitData {
  return JSON.parse(fs.readFileSync(gitDataPath, "utf8")) as GitData;
}

View File

@@ -0,0 +1,71 @@
import {execSync} from "node:child_process";
// This file is created in the build step and is distributed through NPM
// MUST be in sync with `-/gitDataPath.ts` and `package.json` files.
import {readGitDataFile, GitData} from "./gitDataPath.js";
/** Reads git data from a persisted file or local git data at build time. */
export function readAndGetGitData(): GitData {
  try {
    // Git data persisted at build time; may be absent (fresh checkout) or stale
    let persistedGitData: Partial<GitData>;
    try {
      persistedGitData = readGitDataFile();
    } catch (e) {
      persistedGitData = {};
    }

    const currentGitData = getGitData();

    // If the CLI is run from source, prioritize current git data over the
    // `.git-data.json` file, which might be stale here.
    const branch = currentGitData.branch || persistedGitData.branch || "";
    const commit = currentGitData.commit || persistedGitData.commit || "";
    return {branch, commit};
  } catch (e) {
    return {branch: "", commit: ""};
  }
}

/** Gets git data containing current branch and commit info from CLI. */
export function getGitData(): GitData {
  return {
    branch: process.env.GIT_BRANCH ?? getBranch(),
    commit: process.env.GIT_COMMIT ?? getCommit(),
  };
}

/** Tries to get branch from git CLI; returns "" when git is unavailable. */
function getBranch(): string {
  try {
    return shellSilent("git rev-parse --abbrev-ref HEAD");
  } catch (e) {
    return "";
  }
}

/** Tries to get commit hash from git CLI; returns "" when git is unavailable. */
function getCommit(): string {
  try {
    return shellSilent("git rev-parse --verify HEAD");
  } catch (e) {
    return "";
  }
}

/** Runs a shell command without polluting stdout/stderr; returns trimmed stdout. */
function shellSilent(cmd: string): string {
  const stdout = execSync(cmd, {stdio: ["ignore", "pipe", "ignore"]});
  return stdout.toString().trim();
}

View File

@@ -0,0 +1,10 @@
#!/usr/bin/env node
// For RATIONALE of this file, check packages/cli/src/util/gitData/gitDataPath.ts
// Persist exact commit in NPM distributions for easier tracking of the build
import {writeGitDataFile} from "./gitDataPath.js";
import {getGitData} from "./index.js";
// Script to write the git data file (json) used by the build procedures to persist git data.
// Runs at build time; output path and rationale are documented in gitDataPath.ts.
writeGitDataFile(getGitData());

View File

@@ -0,0 +1,35 @@
import {UNVERIFIED_RESPONSE_CODE} from "../constants.js";
import {ELRequestPayload, ELResponse} from "../types.js";
/**
 * Builds a JSON-RPC response envelope for `payload`, carrying either a
 * result or an error.
 */
export function generateRPCResponseForPayload<P, R, E = unknown>(
  payload: ELRequestPayload<P>,
  res?: R,
  error?: {
    readonly code?: number;
    readonly data?: E;
    readonly message: string;
  }
): ELResponse<R> {
  return {jsonrpc: payload.jsonrpc, id: payload.id, result: res, error};
}

/**
 * Builds a JSON-RPC error response signaling that the result could not be
 * verified against light-client proofs (code UNVERIFIED_RESPONSE_CODE).
 */
export function generateUnverifiedResponseForPayload<P, D = unknown>(
  payload: ELRequestPayload<P>,
  message: string,
  data?: D
): ELResponse<never, D> {
  const error = {code: UNVERIFIED_RESPONSE_CODE, message, data};
  return {jsonrpc: payload.jsonrpc, id: payload.id, error};
}

View File

@@ -0,0 +1,46 @@
import {LogData, Logger, LoggerChildOpts} from "@lodestar/utils";
import {ELRequestPayload} from "../types.js";
/**
 * Renders log context for output: plain objects become space-separated
 * "key=value" pairs, everything else is JSON-stringified.
 */
const printLogData = (data: LogData): string => {
  const isPlainObject = typeof data === "object" && data !== null && !Array.isArray(data);
  if (!isPlainObject) return JSON.stringify(data);
  return Object.entries(data)
    .map(([key, value]) => `${key}=${value}`)
    .join(" ");
};
/**
 * Creates a log function for the given level, writing to stdout/stderr in
 * Node and falling back to console.log elsewhere.
 */
const stdLogHandler = (level: string): ((message: string, context?: LogData, error?: Error | undefined) => void) => {
  // Bug fix: `process === undefined` throws a ReferenceError in non-Node
  // environments where `process` is not declared at all; `typeof` is the
  // safe feature test.
  if (typeof process === "undefined") {
    return (message: string, context?: LogData, error?: Error | undefined): void => {
      // eslint-disable-next-line no-console
      console.log(
        `${level}: ${message} ${context === undefined ? "" : printLogData(context)} ${error ? error.stack : ""}`
      );
    };
  }
  return (message: string, context?: LogData, error?: Error | undefined): void => {
    // Route errors to stderr so stdout stays clean for piped output
    const stream = level === "error" ? process.stderr : process.stdout;
    stream.write(
      `${level}: ${message} ${context === undefined ? "" : printLogData(context)} ${error ? error.stack : ""}\n`
    );
  };
};
/**
 * Minimal Logger implementation writing directly to stdout/stderr.
 * `child` loggers are intentionally unsupported.
 */
export const stdLogger: Logger = {
  error: stdLogHandler("error"),
  warn: stdLogHandler("warn"),
  info: stdLogHandler("info"),
  debug: stdLogHandler("debug"),
  verbose: stdLogHandler("verb"),
  // eslint-disable-next-line func-names
  child: function (_options: LoggerChildOpts): Logger {
    throw new Error("Not supported.");
  },
};
/** Logs an incoming EL JSON-RPC request at debug level. */
export function logRequest({logger, payload}: {logger: Logger; payload: ELRequestPayload}): void {
  const params = payload.params === undefined ? "" : JSON.stringify(payload.params);
  logger.debug(`Req method=${payload.method} params=${params}`);
}

View File

@@ -0,0 +1,34 @@
import http from "node:http";
import {ELRequestPayload, ELResponse} from "../types.js";
/** Collects and JSON-parses the body of an incoming HTTP request. */
export const fetchRequestPayload = async (req: http.IncomingMessage): Promise<ELRequestPayload> => {
  return new Promise((resolve, reject) => {
    const chunks: string[] = [];
    req.on("data", (chunk) => {
      chunks.push(`${chunk}`);
    });
    req.on("end", () => {
      try {
        resolve(JSON.parse(chunks.join("")) as ELRequestPayload);
      } catch (err) {
        reject(err);
      }
    });
  });
};

/** Collects and JSON-parses the body of an upstream HTTP response. */
export const fetchResponseBody = async (res: http.IncomingMessage): Promise<ELResponse> => {
  return new Promise((resolve, reject) => {
    const chunks: string[] = [];
    res.on("data", (chunk) => {
      chunks.push(`${chunk}`);
    });
    res.on("end", () => {
      try {
        resolve(JSON.parse(chunks.join("")) as ELResponse);
      } catch (err) {
        reject(err);
      }
    });
  });
};

View File

@@ -0,0 +1,75 @@
import fs from "node:fs";
import path from "node:path";
import {fileURLToPath} from "node:url";
import findUp from "find-up";
import {readAndGetGitData} from "./gitData/index.js";
// Global variable __dirname no longer available in ES6 modules.
// Solutions: https://stackoverflow.com/questions/46745014/alternative-for-dirname-in-node-js-when-using-es6-modules
// eslint-disable-next-line @typescript-eslint/naming-convention
const __dirname = path.dirname(fileURLToPath(import.meta.url));
/** Shape of the version field read from package.json / lerna.json */
type VersionJson = {
  /** "0.28.2" */
  version: string;
};
// Branch names that carry no information in a version string
const BRANCH_IGNORE = /^(HEAD|master|unstable|main)$/;
/**
 * Gathers all information on package version including Git data.
 * @returns a version string, e.g.
 * - Stable release: `v0.36.0/80c248bb`
 * - Dev release: `v0.36.0-dev.80c248bb/80c248bb`
 * - Test branch: `v0.36.0/developer-feature/80c248bb`
 */
export function getVersionData(): {
  version: string;
  commit: string;
} {
  const segments: string[] = [];

  // Local version as "0.28.2", read from package.json, falling back to lerna.json
  const localVersion = readCliPackageJson() || readVersionFromLernaJson();
  if (localVersion) {
    segments.push(`v${localVersion}`);
  }

  const {branch, commit} = readAndGetGitData();

  // Include the branch only when present and not an ignored well-known name
  if (branch && !BRANCH_IGNORE.test(branch)) {
    segments.push(branch);
  }

  // Include the commit only when present. 7 characters to be consistent with Github
  if (commit) {
    const shortCommit = commit.slice(0, 7);
    // Skip the commit when the version string already embeds it (dev versions)
    if (!localVersion || !localVersion.includes(shortCommit)) {
      segments.push(shortCommit);
    }
  }

  return {
    // Guard against an empty segments array
    version: segments.length > 0 ? segments.join("/") : "unknown",
    commit,
  };
}
/** Read version information from lerna.json */
function readVersionFromLernaJson(): string | undefined {
  const lernaJsonPath = findUp.sync("lerna.json", {cwd: __dirname});
  if (!lernaJsonPath) return undefined;
  const {version} = JSON.parse(fs.readFileSync(lernaJsonPath, "utf8")) as VersionJson;
  return version;
}
/** Read version information from package.json */
function readCliPackageJson(): string | undefined {
  const packageJsonPath = findUp.sync("package.json", {cwd: __dirname});
  if (!packageJsonPath) return undefined;
  const {version} = JSON.parse(fs.readFileSync(packageJsonPath, "utf8")) as VersionJson;
  return version;
}

View File

@@ -0,0 +1,29 @@
import {ELVerifiedRequestHandler} from "../interfaces.js";
import {bufferToHex} from "../utils/conversion.js";
import {getELProof, isValidAccount, isValidStorageKeys} from "../utils/execution.js";
import {generateRPCResponseForPayload, generateUnverifiedResponseForPayload} from "../utils/json_rpc.js";
/**
 * Verified handler for `eth_getBalance`.
 *
 * Fetches an EL account proof at the requested block and validates it against
 * the state root of the verified execution payload before returning the balance.
 */
export const ethGetBalance: ELVerifiedRequestHandler<[address: string, block?: number | string], string> = async ({
  handler,
  payload,
  rootProvider,
}) => {
  const {
    params: [address, block],
  } = payload;
  const executionPayload = await rootProvider.getExecutionPayload(block ?? "latest");

  // A balance lookup needs no storage keys, only the account proof
  const proof = await getELProof(handler, [address, [], bufferToHex(executionPayload.blockHash)]);

  // Storage keys are only checked when the account itself verified (same
  // short-circuit order as `a && b`)
  const accountValid = await isValidAccount({address, stateRoot: executionPayload.stateRoot, proof});
  const storageValid = accountValid && (await isValidStorageKeys({storageKeys: [], proof}));

  if (accountValid && storageValid) {
    return generateRPCResponseForPayload(payload, proof.balance);
  }

  return generateUnverifiedResponseForPayload(payload, "eth_getBalance request can not be verified.");
};

View File

@@ -0,0 +1,142 @@
import {NetworkName} from "@lodestar/config/networks";
import {
EIP1193Provider,
EthersProvider,
LCTransport,
RequestProvider,
SendAsyncProvider,
SendProvider,
Web3Provider,
} from "./interfaces.js";
import {ProofProvider} from "./proof_provider/proof_provider.js";
import {ELRequestPayload, ELResponse} from "./types.js";
import {
isEIP1193Provider,
isEthersProvider,
isRequestProvider,
isSendAsyncProvider,
isSendProvider,
} from "./utils/assertion.js";
import {processAndVerifyRequest} from "./utils/execution.js";
/**
 * Options to initialize the proof provider: optional network and
 * weak-subjectivity checkpoint, plus the light-client transport
 * (REST beacon urls, or P2P bootnodes).
 */
type ProvableProviderInitOpts = {network?: NetworkName; wsCheckpoint?: string} & (
  | {transport: LCTransport.Rest; urls: string[]}
  | {transport: LCTransport.P2P; bootnodes: string[]}
);

// Network used when the caller does not specify one
const defaultNetwork = "mainnet";
/**
 * Wrap a web3 provider so that responses for supported methods are verified
 * against a light-client driven `ProofProvider`.
 *
 * Detects the provider flavor via duck-typed guards and patches its request
 * entrypoint; an unrecognized provider is returned untouched (unverified).
 */
export function createVerifiedExecutionProvider<T extends Web3Provider>(
  provider: T,
  opts: ProvableProviderInitOpts
): {provider: T; proofProvider: ProofProvider} {
  const controller = new AbortController();
  const proofProvider = ProofProvider.init({
    ...opts,
    network: opts.network ?? defaultNetwork,
    signal: controller.signal,
  });

  // The order of these checks mirrors the specificity of the provider interfaces
  let wrapped: Web3Provider = provider;
  if (isSendProvider(provider)) {
    wrapped = handleSendProvider(provider, proofProvider);
  } else if (isEthersProvider(provider)) {
    wrapped = handleEthersProvider(provider, proofProvider);
  } else if (isRequestProvider(provider)) {
    wrapped = handleRequestProvider(provider, proofProvider);
  } else if (isSendAsyncProvider(provider)) {
    wrapped = handleSendAsyncProvider(provider, proofProvider);
  } else if (isEIP1193Provider(provider)) {
    wrapped = handleEIP1193Provider(provider, proofProvider);
  }

  return {provider: wrapped as T, proofProvider: proofProvider};
}
/**
 * Patch a callback-style `send` provider so every request passes through verification.
 */
function handleSendProvider(provider: SendProvider, rootProvider: ProofProvider): SendProvider {
  const originalSend = provider.send.bind(provider);

  // Promisified view of the original transport, used by the verification pipeline
  function handler(payload: ELRequestPayload): Promise<ELResponse | undefined> {
    return new Promise((resolve, reject) => {
      originalSend(payload, (err, response) => (err ? reject(err) : resolve(response)));
    });
  }

  function newSend(payload: ELRequestPayload, callback: (err?: Error | null, response?: ELResponse) => void): void {
    processAndVerifyRequest({payload, handler, proofProvider: rootProvider})
      .then((response) => {
        callback(undefined, response);
      })
      .catch((err) => {
        callback(err, undefined);
      });
  }

  return Object.assign(provider, {send: newSend});
}
/**
 * Patch a callback-style `request` provider so every request passes through verification.
 */
function handleRequestProvider(provider: RequestProvider, rootProvider: ProofProvider): RequestProvider {
  const originalRequest = provider.request.bind(provider);

  // Promisified view of the original transport, used by the verification pipeline
  function handler(payload: ELRequestPayload): Promise<ELResponse | undefined> {
    return new Promise((resolve, reject) => {
      originalRequest(payload, (err, response) => (err ? reject(err) : resolve(response)));
    });
  }

  function newRequest(payload: ELRequestPayload, callback: (err?: Error | null, response?: ELResponse) => void): void {
    processAndVerifyRequest({payload, handler, proofProvider: rootProvider})
      .then((response) => {
        callback(undefined, response);
      })
      .catch((err) => {
        callback(err, undefined);
      });
  }

  return Object.assign(provider, {request: newRequest});
}
/**
 * Patch a promise-style `sendAsync` provider so every request passes through verification.
 */
function handleSendAsyncProvider(provider: SendAsyncProvider, rootProvider: ProofProvider): SendAsyncProvider {
  const originalSendAsync = provider.sendAsync.bind(provider);

  async function handler(payload: ELRequestPayload): Promise<ELResponse | undefined> {
    return originalSendAsync(payload);
  }

  const newSendAsync = async (payload: ELRequestPayload): Promise<ELResponse | undefined> =>
    processAndVerifyRequest({payload, handler, proofProvider: rootProvider});

  return Object.assign(provider, {sendAsync: newSendAsync});
}
/**
 * Patch an EIP-1193 `request` provider so every request passes through verification.
 */
function handleEIP1193Provider(provider: EIP1193Provider, rootProvider: ProofProvider): EIP1193Provider {
  const originalRequest = provider.request.bind(provider);

  async function handler(payload: ELRequestPayload): Promise<ELResponse | undefined> {
    return originalRequest(payload);
  }

  const newRequest = async (payload: ELRequestPayload): Promise<ELResponse | undefined> =>
    processAndVerifyRequest({payload, handler, proofProvider: rootProvider});

  return Object.assign(provider, {request: newRequest});
}
/**
 * Patch an ethers provider's `send(method, params)` so every request passes through verification.
 */
function handleEthersProvider(provider: EthersProvider, rootProvider: ProofProvider): EthersProvider {
  const originalSend = provider.send.bind(provider);

  const handler = async (payload: ELRequestPayload): Promise<ELResponse | undefined> =>
    originalSend(payload.method, payload.params);

  async function newSend(method: string, params: Array<unknown>): Promise<ELResponse | undefined> {
    // ethers passes method/params separately; re-assemble a JSON-RPC payload for verification
    return processAndVerifyRequest({
      payload: {jsonrpc: "2.0", id: 0, method, params},
      handler,
      proofProvider: rootProvider,
    });
  }

  return Object.assign(provider, {send: newSend});
}

View File

@@ -0,0 +1,122 @@
import http from "node:http";
import https from "node:https";
import url from "node:url";
import httpProxy from "http-proxy";
import {NetworkName} from "@lodestar/config/networks";
import {Logger} from "@lodestar/utils";
import {LCTransport} from "./interfaces.js";
import {ProofProvider} from "./proof_provider/proof_provider.js";
import {ELRequestPayload, ELResponse} from "./types.js";
import {processAndVerifyRequest} from "./utils/execution.js";
import {logRequest} from "./utils/logger.js";
import {generateRPCResponseForPayload} from "./utils/json_rpc.js";
import {fetchRequestPayload, fetchResponseBody} from "./utils/req_resp.js";
/**
 * Options for the verifying execution proxy: the target EL JSON-RPC endpoint,
 * network, logger, optional weak-subjectivity checkpoint, and the light-client
 * transport (REST beacon urls, or P2P bootnodes).
 */
export type VerifiedProxyOptions = {
  network: NetworkName;
  executionRpcUrl: string;
  logger: Logger;
  wsCheckpoint?: string;
} & ({transport: LCTransport.Rest; urls: string[]} | {transport: LCTransport.P2P; bootnodes: string[]});
/**
 * Create an HTTP server in front of an execution JSON-RPC endpoint that
 * verifies responses via a light-client driven `ProofProvider`.
 *
 * Requests to the `/proxy` path are forwarded untouched to the upstream EL
 * endpoint; all other requests are parsed, pushed through
 * `processAndVerifyRequest`, and answered by this server. Aborting the
 * internal controller's signal closes the server.
 */
export function createVerifiedExecutionProxy(
  opts: VerifiedProxyOptions
): {server: http.Server; proofProvider: ProofProvider} {
  const {executionRpcUrl: executionUrl, logger, network} = opts;
  const controller = new AbortController();
  const proofProvider = ProofProvider.init({
    ...opts,
    network: network,
    signal: controller.signal,
  });
  // Upstream proxy to the EL endpoint; websocket mode when the URL scheme starts with "ws"
  const proxy = httpProxy.createProxy({
    target: executionUrl,
    ws: executionUrl.startsWith("ws"),
    agent: https.globalAgent,
    xfwd: true,
    ignorePath: true,
    changeOrigin: true,
  });
  // Set by the "listening" handler below; `handler` refuses to run until then
  let proxyServerListeningAddress: {host: string; port: number} | undefined;
  // Raw transport used by the verification pipeline: loops the payload back to
  // this server's own `/proxy` path, which forwards it to the upstream EL node
  function handler(payload: ELRequestPayload): Promise<ELResponse | undefined> {
    return new Promise((resolve, reject) => {
      if (!proxyServerListeningAddress) return reject(new Error("Proxy server not listening"));
      const req = http.request(
        {
          method: "POST",
          path: "/proxy",
          port: proxyServerListeningAddress.port,
          host: proxyServerListeningAddress.host,
          signal: controller.signal,
          headers: {
            "Content-Type": "application/json",
          },
        },
        (res) => {
          fetchResponseBody(res).then(resolve).catch(reject);
        }
      );
      req.write(JSON.stringify(payload));
      req.end();
    });
  }
  const proxyServer = http.createServer(function proxyRequestHandler(req, res) {
    // Loop-back requests from `handler` go straight to the upstream EL node
    if (req.url === "/proxy") {
      proxy.web(req, res);
      return;
    }
    // Captured so the catch block can echo the request id back in the error response
    let payload: ELRequestPayload;
    fetchRequestPayload(req)
      .then((data) => {
        payload = data;
        logRequest({payload, logger});
        return processAndVerifyRequest({payload, proofProvider, handler});
      })
      .then((response) => {
        res.write(JSON.stringify(response));
        res.end();
      })
      .catch((err) => {
        // NOTE(review): if fetchRequestPayload itself rejects, `payload` is still
        // undefined here — confirm generateRPCResponseForPayload tolerates that
        res.write(JSON.stringify(generateRPCResponseForPayload(payload, undefined, {message: (err as Error).message})));
        res.end();
      });
  });
  proxyServer.on("listening", () => {
    // Record the bound address so `handler` can loop requests back through /proxy
    const address = proxyServer.address();
    if (address === null) {
      throw new Error("Invalid proxy server address");
    }
    if (typeof address === "string") {
      // e.g. a pipe/unix-socket style address string
      const rawUrl = url.parse(address);
      if (!rawUrl.host || !rawUrl.port || !rawUrl.protocol) {
        throw new Error(`Invalid proxy server address: ${address}`);
      }
      proxyServerListeningAddress = {host: rawUrl.host, port: parseInt(rawUrl.port)};
    } else {
      proxyServerListeningAddress = {host: address.address, port: address.port};
    }
    logger.info(
      `Lodestar Prover Proxy listening on ${proxyServerListeningAddress.host}:${proxyServerListeningAddress.port}`
    );
  });
  // Websocket upgrades are forwarded to the upstream EL node unverified
  proxyServer.on("upgrade", function proxyRequestUpgrade(req, socket, head) {
    proxy.ws(req, socket, head);
  });
  controller.signal.addEventListener("abort", () => {
    proxyServer.close();
  });
  return {server: proxyServer, proofProvider};
}

View File

@@ -0,0 +1,45 @@
import {expect} from "chai";
import Web3 from "web3";
import {ethers} from "ethers";
import {LCTransport} from "../../src/interfaces.js";
import {createVerifiedExecutionProvider} from "../../src/web3_provider.js";
describe("web3_provider", () => {
describe("createVerifiedExecutionProvider", function () {
// As the code will try to sync the light client, it may take a while
this.timeout(10000);
describe("web3", () => {
it("should connect to the network and call non-verified method", async () => {
const {provider} = createVerifiedExecutionProvider(
new Web3.providers.HttpProvider("https://lodestar-sepoliarpc.chainsafe.io"),
{
transport: LCTransport.Rest,
urls: ["https://lodestar-sepolia.chainsafe.io"],
network: "sepolia",
}
);
const web3 = new Web3(provider);
// `getProof` will always remain the non-verified method
// as we use it to create proof and verify
await expect(web3.eth.getProof("0xf97e180c050e5Ab072211Ad2C213Eb5AEE4DF134", [], "latest")).fulfilled;
});
});
describe("ethers", () => {
it("should connect to the network and call non-verified method", async () => {
const {provider} = createVerifiedExecutionProvider(
new ethers.JsonRpcProvider("https://lodestar-sepoliarpc.chainsafe.io"),
{
transport: LCTransport.Rest,
urls: ["https://lodestar-sepolia.chainsafe.io"],
network: "sepolia",
}
);
await expect(provider.send("eth_getProof", ["0xf97e180c050e5Ab072211Ad2C213Eb5AEE4DF134", [], "latest"]))
.fulfilled;
});
});
});
});

View File

@@ -0,0 +1,6 @@
import chai from "chai";
import chaiAsPromised from "chai-as-promised";
import sinonChai from "sinon-chai";
// Register the shared chai plugins once for the whole test suite
// (order preserved: promise assertions first, then sinon matchers)
for (const plugin of [chaiAsPromised, sinonChai]) {
  chai.use(plugin);
}

View File

@@ -0,0 +1,27 @@
import {expect} from "chai";
import {ethers} from "ethers";
import Web3 from "web3";
import {isSendProvider} from "../../../src/utils/assertion.js";
describe("utils/assertion", () => {
describe("isSendProvider", () => {
it("should return true if provider is SendProvider", () => {
const provider = {
send: (_payload: any, _cb: () => void) => {
// Do nothing;
},
};
expect(isSendProvider(provider)).to.be.true;
});
it("should return false for ethers provider", () => {
const provider = new ethers.JsonRpcProvider("");
expect(isSendProvider(provider)).to.be.false;
});
it("should return true for web3 provider", () => {
const provider = new Web3.providers.HttpProvider("");
expect(isSendProvider(provider)).to.be.true;
});
});
});

View File

@@ -0,0 +1,192 @@
import {expect} from "chai";
import chai from "chai";
import chaiAsPromised from "chai-as-promised";
import {ELProof, ELStorageProof} from "../../../src/types.js";
import {isValidAccount, isValidStorageKeys} from "../../../src/utils/execution.js";
// Needed for the promise-based `eventually` / `rejectedWith` assertions below
chai.use(chaiAsPromised);
describe("uitls/verification", () => {
describe("isValidAccount", () => {
it("should return true if account is valid", async () => {
const address = "0xf97e180c050e5ab072211ad2c213eb5aee4df134";
const stateRoot = Buffer.from("7c0f9a6f21d82c2d7690db7aa36c9938de11891071eed6e50ff8b06b5ae7018a", "hex");
const proof: ELProof = {
address: "0xf97e180c050e5ab072211ad2c213eb5aee4df134",
accountProof: [
"0xf90211a0d75d80002d4636855e0a3641ca53b495f0ccc3647af350ed23e86c4a9a21e089a0336a6387ac802d0d0433b65144610247ae2c4308b5f08683c641cddae571de52a0660dfa6b7ff69e6c4fe394e28c04097fd7c0d1e24d4d33731517ffeedeb73157a07d9a0299ad17ee459de4036254a522384bff041c32824761717abe82ed2250d5a0048f46242274fcfb2987dd9e1071335f966065322639b21a62a45ef9c4364707a06ab5c805de0350d7eb8641cfa3e985b58506529fc4f2175d018485b100fc8b57a030319f5efe302ba7d2cb593508af9e5b09eb8b961aac077f3e93e7c30d72b23aa08062584b94684d3585bc5cff5eb14ce8288cae9e6cca02d0145111fdeaec1ce8a05439df26136570dd0e3091ae189084e5445551e73de9032a589da1e858f493f7a074bfd76b5dfbaeb33d5caa06b0606eddfa9e2650b162a806e834535297a43f6fa0424651d0c1014d6575f418d84813d5fe61807162ae61eb9e2e29c099b62cff15a0717da7ec37ee17527323649684010b25b7fd7a53017645c92302aef90f66c696a0ce494b7c363d3ae8cca6fa9a6e60cf27b824f4e5acfa286b5b972d62e378d210a0861ef474c4cdeabd705bae1f3cc0a414129e7c2ad5c4fde69d5eea1757302046a0d8cc468f942d47cbc8349d221f958ae0505da2a4573db6a59859b743568cffcaa0fd6408c5b26aeba4d614d7043fd04082b9262e44dc966ad2c88add47b580325c80",
"0xf90211a03aa3b28c58df0f2425569105f33d2b6f06682549a6086ca70e435e083bc86642a03177615c6149235d6c65e332ee7cc959b359d5dfdf9f6e3d29148d0f21aae9cba00357883423f1f4d1c5a21ef3c3bac9d87df1776aac962b1179c6ab4fa4c90eb1a0e5cdccfd420d7d0a115819805fc36372603d3a17ce1c8b3eb475d5c92f48090ea001b2c7e0885020c533a4bffc63735c409a118c774fa9927f56f54670de7a66bda054744b8c7e8ef96f30199b4038ea26467ea1803d205c2941e64a92a4b69f3457a0b3d5d0084572d60091cfd0e24a4ffba0b3fb53b53b9b6f91b0bb09714738ea46a052b016189553636fc18eaefd5ca00364dce332376a2988c936fa2527d15e215fa095f7f13944352c399335b7fe8971641db0e9922d9a91279e422f10cc75e16c26a0f5e63e53a4ab4157d7f3824747518d45567f1fab8fe1d41d569d33a97cb0e252a086928b3e984b0b92e24a8c27465e673e6c6336b7a9967b758e9a77ff92edb4e3a0bf10d9217a9bee1822e8f9edf82ac1d17725748fb9dcb0a16364da24bfbaa002a020eacf91465e7edefad05c0cc88c9209278edd213329f2f157969884985ecfaba02a195ddc556ebd100289d11ba8a6c54724bca5c93de0cec1e567f4f4537d952ca07237f43e3b1c16b5d7112fff823c08ffdd6891f4c9d2cb06b6300a0e3551f5faa0410d0b37714dbc23a76101f88a9b5876ab6d0d026391a2b456ea6bb245ef3c9680",
"0xf90211a05cad7b7b1877521c405da0d0600be8f8dbfb6fe593fe7e0ff79b07cae70c0d44a04d7fa730f2cf4127121a4fbd783e97c372d00e2278f1475bd46df26ed1aba7e0a0d5368b792c687cfe7942dc1f9ece7941967d5a0b7d7c569c0dc3ec3c00cd8254a05c9f8de738470a1c962b38e95b32932afd26eae633f436960789acc024ac02c1a0b89a8e0ac2119c1ab9f40b233e2fb6878f7e41ccffa431c725d9e12c6589685ea059e306727f0c5c7ea0bc89ddbccd2d83ba5d9bc845a1be74e8677defcd0382a6a0a26aac0a6ff0f5e9e24eb6a0cc2d0813dd931d588941ce9ab4c74fd67bfe1b7fa07761bda76f79d20fc56a7f75d31295b3456fe001e12cb35bcd9cc12fdb5363faa07607ec1df2eca793d11953260a679a586793a1da4a55a2c8797741ee7ebd446ba0775277855b4d28645a7e3309d307616866e0df43f30255a5a8c0687430e0f0e4a0246affb48f550d0ea5f7b0ccdb14dd2b413356ae109d2522fd91379ecbe4c60ca0e82662910f79c26e9e5681ecbf79642fb02fe797ec7d17f23054543b00ef34dea0b6d81b91c67ac2db3e32d1556d6e6f699ce1cd7f2c0100be64c6264a0cd090d8a061a554d18934beddc75a67777326400ba6e254366d1ef59ee47bcfa91b3279e4a06f13cd680786a409ccbc9b84471b978a9efa02b09d1a64670ff6451a32d37b96a0e929123e076756fb6d43d668ddb199aca289b8c5869ec93842f1ef89c39c604f80",
"0xf90211a028f2b0b77d46dd1a39fd0164d641b78e69ef29ce2ab676f02ee116d910b323cea0fcef9afa59d09cb714351378fd2c64becee3b1e9f09220687596670befb3a216a035d92a41b8e7e8c8f22abdce06272884cf1e53be96aa39a8aa722f97d0c8ea85a028b36435bf98dd8bac1edb6fc162737117320b000cd7609ec0d418e8dcad859ba062aaed887f8084e22f78779a671181c603cfeadc33629e4dc716fb4b8165d782a0180471c441c16211a8f2a3d9cc3859216965338971ece92d5386a972e6460c4ea0f3267b9acf4d4d47c8f563625c3f6a05e6d919312692945a28d44ada0408a8a4a0f157cf2e66744bb885c90b96b0ac50f45677262671224a4f754b1693930545b0a024db528d804e437bcbd74c6478d09fdcd95104bed7a110c01d89d4b24c283caea0f7ddca48dbfc34175322b37e35be0e10b7e8f0cf4b0f3c0c64ca4cc7409558a7a0fc03b71345f0f7c2c784b3e9bb8793c8358553eaa9811943af4b7e1ab827d64ca09db546d194941254910642424ed644cc13d27862d797faec1c73d72d4d1aff38a07ad68175fa8ccebae05aca2ac077b98f8906dfe1c01a526060d45948794badeba0383a06be4451de334f19162fd2f2b3a01ccaa318f0b19f6053c7f6274702a92ea0c92e0b54e5c7e5f6f18e0aa9762e09e911f0f519654b945103d517154125e695a0dcf537bda42520ce6670c2f3fb546ea12d28518f06310da26a9df887a1fc2a4880",
"0xf90131a02fc4b4a237a100d29804e54134a3f590fe14cfc5932cf0d7db7d10b7c19359a580a07fb3394fdb1dc6cb6ee6bf74e16772783361840df5878b10284ef41f457a9cdda0b5d3f9bd4f88da36a3fbb68526053d7394f35411433bd88799510ce8a51e08dea0700e6a715dd6d428d31cbc9543cf768212e07692a79bd560e56d4f9547f3b6fb8080a0b295e4bf173f6a21ab742c03976070c3ba873b5fe457831de8411bf5ca988d08a057c069ca8e65fea0a889e16e770a7cc4f3125a35820deb3f0e6f8ae22e08d2eba0781511eba2101b49120073b03e24e79a3cdb0a02ee2d948e05a99a8b3b4145a4a011504843006186cba62c7f5563b3aee69c9f0325d6aadddc4c7c5ea246e50772a0ec6a3ac8944ef87fd684400258ef36a89194599884b606b032bedb29f42bcd008080808080",
"0xf8729e31610ff568919eaebec883afe43d94f9558d9c666ca54b3d3bfef2311d37b851f84f0c8b034f248d17939c513a7e80a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
],
balance: "0x34f248d17939c513a7e80",
codeHash: "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
nonce: "0xc",
storageHash: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
storageProof: [],
};
await expect(
isValidAccount({
proof,
address,
stateRoot,
})
).eventually.to.be.true;
});
it("should fail with error if proof is valid but address is wrong", async () => {
const address = "0xe97e180c050e5ab072211ad2c213eb5aee4df134";
const stateRoot = Buffer.from("7c0f9a6f21d82c2d7690db7aa36c9938de11891071eed6e50ff8b06b5ae7018a", "hex");
const proof: ELProof = {
address: "0xf97e180c050e5ab072211ad2c213eb5aee4df134",
accountProof: [
"0xf90211a0d75d80002d4636855e0a3641ca53b495f0ccc3647af350ed23e86c4a9a21e089a0336a6387ac802d0d0433b65144610247ae2c4308b5f08683c641cddae571de52a0660dfa6b7ff69e6c4fe394e28c04097fd7c0d1e24d4d33731517ffeedeb73157a07d9a0299ad17ee459de4036254a522384bff041c32824761717abe82ed2250d5a0048f46242274fcfb2987dd9e1071335f966065322639b21a62a45ef9c4364707a06ab5c805de0350d7eb8641cfa3e985b58506529fc4f2175d018485b100fc8b57a030319f5efe302ba7d2cb593508af9e5b09eb8b961aac077f3e93e7c30d72b23aa08062584b94684d3585bc5cff5eb14ce8288cae9e6cca02d0145111fdeaec1ce8a05439df26136570dd0e3091ae189084e5445551e73de9032a589da1e858f493f7a074bfd76b5dfbaeb33d5caa06b0606eddfa9e2650b162a806e834535297a43f6fa0424651d0c1014d6575f418d84813d5fe61807162ae61eb9e2e29c099b62cff15a0717da7ec37ee17527323649684010b25b7fd7a53017645c92302aef90f66c696a0ce494b7c363d3ae8cca6fa9a6e60cf27b824f4e5acfa286b5b972d62e378d210a0861ef474c4cdeabd705bae1f3cc0a414129e7c2ad5c4fde69d5eea1757302046a0d8cc468f942d47cbc8349d221f958ae0505da2a4573db6a59859b743568cffcaa0fd6408c5b26aeba4d614d7043fd04082b9262e44dc966ad2c88add47b580325c80",
"0xf90211a03aa3b28c58df0f2425569105f33d2b6f06682549a6086ca70e435e083bc86642a03177615c6149235d6c65e332ee7cc959b359d5dfdf9f6e3d29148d0f21aae9cba00357883423f1f4d1c5a21ef3c3bac9d87df1776aac962b1179c6ab4fa4c90eb1a0e5cdccfd420d7d0a115819805fc36372603d3a17ce1c8b3eb475d5c92f48090ea001b2c7e0885020c533a4bffc63735c409a118c774fa9927f56f54670de7a66bda054744b8c7e8ef96f30199b4038ea26467ea1803d205c2941e64a92a4b69f3457a0b3d5d0084572d60091cfd0e24a4ffba0b3fb53b53b9b6f91b0bb09714738ea46a052b016189553636fc18eaefd5ca00364dce332376a2988c936fa2527d15e215fa095f7f13944352c399335b7fe8971641db0e9922d9a91279e422f10cc75e16c26a0f5e63e53a4ab4157d7f3824747518d45567f1fab8fe1d41d569d33a97cb0e252a086928b3e984b0b92e24a8c27465e673e6c6336b7a9967b758e9a77ff92edb4e3a0bf10d9217a9bee1822e8f9edf82ac1d17725748fb9dcb0a16364da24bfbaa002a020eacf91465e7edefad05c0cc88c9209278edd213329f2f157969884985ecfaba02a195ddc556ebd100289d11ba8a6c54724bca5c93de0cec1e567f4f4537d952ca07237f43e3b1c16b5d7112fff823c08ffdd6891f4c9d2cb06b6300a0e3551f5faa0410d0b37714dbc23a76101f88a9b5876ab6d0d026391a2b456ea6bb245ef3c9680",
"0xf90211a05cad7b7b1877521c405da0d0600be8f8dbfb6fe593fe7e0ff79b07cae70c0d44a04d7fa730f2cf4127121a4fbd783e97c372d00e2278f1475bd46df26ed1aba7e0a0d5368b792c687cfe7942dc1f9ece7941967d5a0b7d7c569c0dc3ec3c00cd8254a05c9f8de738470a1c962b38e95b32932afd26eae633f436960789acc024ac02c1a0b89a8e0ac2119c1ab9f40b233e2fb6878f7e41ccffa431c725d9e12c6589685ea059e306727f0c5c7ea0bc89ddbccd2d83ba5d9bc845a1be74e8677defcd0382a6a0a26aac0a6ff0f5e9e24eb6a0cc2d0813dd931d588941ce9ab4c74fd67bfe1b7fa07761bda76f79d20fc56a7f75d31295b3456fe001e12cb35bcd9cc12fdb5363faa07607ec1df2eca793d11953260a679a586793a1da4a55a2c8797741ee7ebd446ba0775277855b4d28645a7e3309d307616866e0df43f30255a5a8c0687430e0f0e4a0246affb48f550d0ea5f7b0ccdb14dd2b413356ae109d2522fd91379ecbe4c60ca0e82662910f79c26e9e5681ecbf79642fb02fe797ec7d17f23054543b00ef34dea0b6d81b91c67ac2db3e32d1556d6e6f699ce1cd7f2c0100be64c6264a0cd090d8a061a554d18934beddc75a67777326400ba6e254366d1ef59ee47bcfa91b3279e4a06f13cd680786a409ccbc9b84471b978a9efa02b09d1a64670ff6451a32d37b96a0e929123e076756fb6d43d668ddb199aca289b8c5869ec93842f1ef89c39c604f80",
"0xf90211a028f2b0b77d46dd1a39fd0164d641b78e69ef29ce2ab676f02ee116d910b323cea0fcef9afa59d09cb714351378fd2c64becee3b1e9f09220687596670befb3a216a035d92a41b8e7e8c8f22abdce06272884cf1e53be96aa39a8aa722f97d0c8ea85a028b36435bf98dd8bac1edb6fc162737117320b000cd7609ec0d418e8dcad859ba062aaed887f8084e22f78779a671181c603cfeadc33629e4dc716fb4b8165d782a0180471c441c16211a8f2a3d9cc3859216965338971ece92d5386a972e6460c4ea0f3267b9acf4d4d47c8f563625c3f6a05e6d919312692945a28d44ada0408a8a4a0f157cf2e66744bb885c90b96b0ac50f45677262671224a4f754b1693930545b0a024db528d804e437bcbd74c6478d09fdcd95104bed7a110c01d89d4b24c283caea0f7ddca48dbfc34175322b37e35be0e10b7e8f0cf4b0f3c0c64ca4cc7409558a7a0fc03b71345f0f7c2c784b3e9bb8793c8358553eaa9811943af4b7e1ab827d64ca09db546d194941254910642424ed644cc13d27862d797faec1c73d72d4d1aff38a07ad68175fa8ccebae05aca2ac077b98f8906dfe1c01a526060d45948794badeba0383a06be4451de334f19162fd2f2b3a01ccaa318f0b19f6053c7f6274702a92ea0c92e0b54e5c7e5f6f18e0aa9762e09e911f0f519654b945103d517154125e695a0dcf537bda42520ce6670c2f3fb546ea12d28518f06310da26a9df887a1fc2a4880",
"0xf90131a02fc4b4a237a100d29804e54134a3f590fe14cfc5932cf0d7db7d10b7c19359a580a07fb3394fdb1dc6cb6ee6bf74e16772783361840df5878b10284ef41f457a9cdda0b5d3f9bd4f88da36a3fbb68526053d7394f35411433bd88799510ce8a51e08dea0700e6a715dd6d428d31cbc9543cf768212e07692a79bd560e56d4f9547f3b6fb8080a0b295e4bf173f6a21ab742c03976070c3ba873b5fe457831de8411bf5ca988d08a057c069ca8e65fea0a889e16e770a7cc4f3125a35820deb3f0e6f8ae22e08d2eba0781511eba2101b49120073b03e24e79a3cdb0a02ee2d948e05a99a8b3b4145a4a011504843006186cba62c7f5563b3aee69c9f0325d6aadddc4c7c5ea246e50772a0ec6a3ac8944ef87fd684400258ef36a89194599884b606b032bedb29f42bcd008080808080",
"0xf8729e31610ff568919eaebec883afe43d94f9558d9c666ca54b3d3bfef2311d37b851f84f0c8b034f248d17939c513a7e80a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
],
balance: "0x34f248d17939c513a7e80",
codeHash: "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
nonce: "0xc",
storageHash: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
storageProof: [],
};
await expect(
isValidAccount({
proof,
address,
stateRoot,
})
).rejectedWith("Invalid proof provided");
});
it("should fail with error if account is not valid", async () => {
const address = "0xf97e180c050e5ab072211ad2c213eb5aee4df134";
const stateRoot = Buffer.from("7c0f9a6f21d82c2d7690db7aa36c9938de11891071eed6e50ff8b06b5ae7018a", "hex");
const proof: ELProof = {
address: "0xf97e180c050e5ab072211ad2c213eb5aee4df134",
accountProof: [
"0xf90211a0d75d80002d4636755e0a3641ca53b495f0ccc3647af350ed23e86c4a9a21e089a0336a6387ac802d0d0433b65144610247ae2c4308b5f08683c641cddae571de52a0660dfa6b7ff69e6c4fe394e28c04097fd7c0d1e24d4d33731517ffeedeb73157a07d9a0299ad17ee459de4036254a522384bff041c32824761717abe82ed2250d5a0048f46242274fcfb2987dd9e1071335f966065322639b21a62a45ef9c4364707a06ab5c805de0350d7eb8641cfa3e985b58506529fc4f2175d018485b100fc8b57a030319f5efe302ba7d2cb593508af9e5b09eb8b961aac077f3e93e7c30d72b23aa08062584b94684d3585bc5cff5eb14ce8288cae9e6cca02d0145111fdeaec1ce8a05439df26136570dd0e3091ae189084e5445551e73de9032a589da1e858f493f7a074bfd76b5dfbaeb33d5caa06b0606eddfa9e2650b162a806e834535297a43f6fa0424651d0c1014d6575f418d84813d5fe61807162ae61eb9e2e29c099b62cff15a0717da7ec37ee17527323649684010b25b7fd7a53017645c92302aef90f66c696a0ce494b7c363d3ae8cca6fa9a6e60cf27b824f4e5acfa286b5b972d62e378d210a0861ef474c4cdeabd705bae1f3cc0a414129e7c2ad5c4fde69d5eea1757302046a0d8cc468f942d47cbc8349d221f958ae0505da2a4573db6a59859b743568cffcaa0fd6408c5b26aeba4d614d7043fd04082b9262e44dc966ad2c88add47b580325c80",
"0xf90211a03aa3b28c58df0f2425569105f33d2b6f06682549a6086ca70e435e083bc86642a03177615c6149235d6c65e332ee7cc959b359d5dfdf9f6e3d29148d0f21aae9cba00357883423f1f4d1c5a21ef3c3bac9d87df1776aac962b1179c6ab4fa4c90eb1a0e5cdccfd420d7d0a115819805fc36372603d3a17ce1c8b3eb475d5c92f48090ea001b2c7e0885020c533a4bffc63735c409a118c774fa9927f56f54670de7a66bda054744b8c7e8ef96f30199b4038ea26467ea1803d205c2941e64a92a4b69f3457a0b3d5d0084572d60091cfd0e24a4ffba0b3fb53b53b9b6f91b0bb09714738ea46a052b016189553636fc18eaefd5ca00364dce332376a2988c936fa2527d15e215fa095f7f13944352c399335b7fe8971641db0e9922d9a91279e422f10cc75e16c26a0f5e63e53a4ab4157d7f3824747518d45567f1fab8fe1d41d569d33a97cb0e252a086928b3e984b0b92e24a8c27465e673e6c6336b7a9967b758e9a77ff92edb4e3a0bf10d9217a9bee1822e8f9edf82ac1d17725748fb9dcb0a16364da24bfbaa002a020eacf91465e7edefad05c0cc88c9209278edd213329f2f157969884985ecfaba02a195ddc556ebd100289d11ba8a6c54724bca5c93de0cec1e567f4f4537d952ca07237f43e3b1c16b5d7112fff823c08ffdd6891f4c9d2cb06b6300a0e3551f5faa0410d0b37714dbc23a76101f88a9b5876ab6d0d026391a2b456ea6bb245ef3c9680",
"0xf90211a05cad7b7b1877521c405da0d0600be8f8dbfb6fe593fe7e0ff79b07cae70c0d44a04d7fa730f2cf4127121a4fbd783e97c372d00e2278f1475bd46df26ed1aba7e0a0d5368b792c687cfe7942dc1f9ece7941967d5a0b7d7c569c0dc3ec3c00cd8254a05c9f8de738470a1c962b38e95b32932afd26eae633f436960789acc024ac02c1a0b89a8e0ac2119c1ab9f40b233e2fb6878f7e41ccffa431c725d9e12c6589685ea059e306727f0c5c7ea0bc89ddbccd2d83ba5d9bc845a1be74e8677defcd0382a6a0a26aac0a6ff0f5e9e24eb6a0cc2d0813dd931d588941ce9ab4c74fd67bfe1b7fa07761bda76f79d20fc56a7f75d31295b3456fe001e12cb35bcd9cc12fdb5363faa07607ec1df2eca793d11953260a679a586793a1da4a55a2c8797741ee7ebd446ba0775277855b4d28645a7e3309d307616866e0df43f30255a5a8c0687430e0f0e4a0246affb48f550d0ea5f7b0ccdb14dd2b413356ae109d2522fd91379ecbe4c60ca0e82662910f79c26e9e5681ecbf79642fb02fe797ec7d17f23054543b00ef34dea0b6d81b91c67ac2db3e32d1556d6e6f699ce1cd7f2c0100be64c6264a0cd090d8a061a554d18934beddc75a67777326400ba6e254366d1ef59ee47bcfa91b3279e4a06f13cd680786a409ccbc9b84471b978a9efa02b09d1a64670ff6451a32d37b96a0e929123e076756fb6d43d668ddb199aca289b8c5869ec93842f1ef89c39c604f80",
"0xf90211a028f2b0b77d46dd1a39fd0164d641b78e69ef29ce2ab676f02ee116d910b323cea0fcef9afa59d09cb714351378fd2c64becee3b1e9f09220687596670befb3a216a035d92a41b8e7e8c8f22abdce06272884cf1e53be96aa39a8aa722f97d0c8ea85a028b36435bf98dd8bac1edb6fc162737117320b000cd7609ec0d418e8dcad859ba062aaed887f8084e22f78779a671181c603cfeadc33629e4dc716fb4b8165d782a0180471c441c16211a8f2a3d9cc3859216965338971ece92d5386a972e6460c4ea0f3267b9acf4d4d47c8f563625c3f6a05e6d919312692945a28d44ada0408a8a4a0f157cf2e66744bb885c90b96b0ac50f45677262671224a4f754b1693930545b0a024db528d804e437bcbd74c6478d09fdcd95104bed7a110c01d89d4b24c283caea0f7ddca48dbfc34175322b37e35be0e10b7e8f0cf4b0f3c0c64ca4cc7409558a7a0fc03b71345f0f7c2c784b3e9bb8793c8358553eaa9811943af4b7e1ab827d64ca09db546d194941254910642424ed644cc13d27862d797faec1c73d72d4d1aff38a07ad68175fa8ccebae05aca2ac077b98f8906dfe1c01a526060d45948794badeba0383a06be4451de334f19162fd2f2b3a01ccaa318f0b19f6053c7f6274702a92ea0c92e0b54e5c7e5f6f18e0aa9762e09e911f0f519654b945103d517154125e695a0dcf537bda42520ce6670c2f3fb546ea12d28518f06310da26a9df887a1fc2a4880",
"0xf90131a02fc4b4a237a100d29804e54134a3f590fe14cfc5932cf0d7db7d10b7c19359a580a07fb3394fdb1dc6cb6ee6bf74e16772783361840df5878b10284ef41f457a9cdda0b5d3f9bd4f88da36a3fbb68526053d7394f35411433bd88799510ce8a51e08dea0700e6a715dd6d428d31cbc9543cf768212e07692a79bd560e56d4f9547f3b6fb8080a0b295e4bf173f6a21ab742c03976070c3ba873b5fe457831de8411bf5ca988d08a057c069ca8e65fea0a889e16e770a7cc4f3125a35820deb3f0e6f8ae22e08d2eba0781511eba2101b49120073b03e24e79a3cdb0a02ee2d948e05a99a8b3b4145a4a011504843006186cba62c7f5563b3aee69c9f0325d6aadddc4c7c5ea246e50772a0ec6a3ac8944ef87fd684400258ef36a89194599884b606b032bedb29f42bcd008080808080",
"0xf8729e31610ff568919eaebec883afe43d94f9558d9c666ca54b3d3bfef2311d37b851f84f0c8b034f248d17939c513a7e80a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
],
balance: "0x34f248d17939c513a7e80",
codeHash: "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
nonce: "0xc",
storageHash: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
storageProof: [],
};
await expect(
isValidAccount({
proof,
address,
stateRoot,
})
).rejectedWith("Invalid proof provided");
});
});
describe("isValidStorageKeys", () => {
it("should return true if storage keys are valid", async () => {
const proof: ELStorageProof = {
storageHash: "0x1b6a3261a5285e7d20f74f6f86134c894d395e39e15993eb4fad87a40f6af5e4",
storageProof: [
{
key: "0xa934b07068f5d95a11413ed6d08a4a1122dc4b8c14a6ab2d94f8b279dac63042",
value: "0x0",
proof: [
"0xf90211a035f929f33e4ff13e3c7a0f0cc1df0fd87ed58ac8a819091a0a1c05bb2f0cfc52a0d0ec64fd9c5883b3df24861cca332e437968ee38d98ec88670d09a235e94fd7aa0fcc64807aa553ac5e7e7ed716f5fb30b2af33a5e3e91ca60d7a5f2aa0dd6b0eca0edfd8d0f1559f25e5005ab9c2587f747a20e921145dc2cb276f717580ef49218a06066949aba34a1cdbc27e2a2de9f2673dd67a97729cadb8fd9e0be6bb8505e6da00023406388cbba918bd64aa14d6d1550f457aaaf212975d9ffa4a619363d8706a05b310a0ebf7270ef604b1f1c10f8e27dc1fc51b535715e3215f1ee04b47fe71ba01d1c1727fb02071b7350e1705cfcd88936d0465e4e075724e10cc6b0f1b18ef9a045e7acbb3d9f3a9886bad2befcebbc92ea7e338565ac76be3768e8da5bdbbf58a0d76ea3d82a08c5b565c7d6dffd7b9a8559006e8af04a264e8ae9c79ce729c3a7a0f1a8ba3190c4636f9570501509db1d209d8ac650b070a6a2ee973339b53efacca0e7c5ad879837168fd39cb45c986bffd8d3c99e487685213fef9fc54d06770e04a00556a20c4e9a24f2467ac80045c5d4c0671a2e79f9d475f12317119267b560f3a0c159b311bef30ab466930a3694c6ccc9ecf728332fb039aec3e49795f0469befa0ec31caaaa48bba20bbcf1c5b953ec47b491cd5c6903640e4102ce54613db19baa09e76eadd4788600b5b37b9dfb2e355802ac75ce65de5fb87e4d930974e5d3e2180",
"0xf90211a0a7af35500f30484963b3168c62d7a86299e02b5cd0b698a541fc75a637611a80a0eaa57f45d3864fd735614b8c3de99875e8537e3d957b8647cbadcfe503965128a07caa21a62ef79a3cb87cb922cc267d5675d2dfed0487edc35d888846f63cf5e8a05179f38e0b309df3cca585eccb07a2df7d158eecb1dcde8792db7d99af6658e4a0fcf8de500c8c8659a762438d35c8a24e535a4f6156ef55095f2c9682eda15115a0542184034e298de70cad346481b45a38b8c4a68b01d864d3bc753c37cad8922ba0efb0e25dce2f783c3ed699971ffed4397250c1f8947c93902215e0c4d4284812a016a20023cf42f458b9207aaf3864266953dde6718f07837104ab19231c331af6a08ac356f497d61d607c566be1c85bd331b7a192d4bf4fd579b3b08b97e69b0c90a004e200625b0f83e93b8225f044090fc899957964dcccda7a62b54e1a8d5e6a79a0bdaa3c3b540ceb4faefd4415196fb9f997cbf7f6c0e3e5c4c0cb881dcbdbfb5ba0124e452071958a977fd7606712ed22f0b814f8d0e46f6e161a74d449c328bafaa077caef292e5b69ba5dd1779ac56b58c6a0410ae0bc3463ca964c281b99ae624ba05b2cc97bc76ec70df7de2d470a36d299dda926c64d6886e574911e8f7c8b2b42a0626fbd444bbee61a5bd65703d9f3b60a17faeedc48e74308f76130142b3b5471a0f9c47f1313d03542e8e845d0f1f03a84d5bf1bd8ec209b73b0570fff7764ce3b80",
"0xf90211a0003d99fbd5ed6a3c36b3a94a7676f94e7320eb29f664d66350cef4e87e71d0a6a0e40b6e4d13bc2e79a1842c0459506e8d1eaf23c2abe6da8b652e9ac79887819ba082e41d8c0a86d81f229743c97ba23d0cfdcb348dc410a4d199c22b43954dc061a0ade1a10055a1b8dc66eb1d0b3f66533440f003a4b30435090cc737ccda7077c3a0a0391531281ea5fcfad3ac057627c3e5a7b26d8178fa3290db3d4bfeed426b17a0a18d4ea1516b9ce01b66a926e03cd545faef205594a9ab09ff181e4062900425a0aa0f007dc836777093c7a156c0a897b14b0cf31bbec89aea65df86f67e2a09c2a03973162b1b6133f91d10a420fb38d339f51f5b8e650d43a9644f82c17747b6f7a0429b1a67530253c5ef6d90799cefceea90d7623e7dc666d0611364feec413d40a0f5f898635d366ab1e1cc762044dc128879632b3463859a3b25e83796cf1c9bcda060ae20f12695ddcf0dcbf8c197314960faaf2b8a4a5d2ecb9a2c449d9dccd880a032b52c3864ece34069e471bd44057e066c80696db9755c1df264ce86c6aa9ee1a032b7170ee13afc61011dfad9d0df2c07dea31a3c316f6af67b94f00f044e1c7fa0ca5e38fbc4de6dad4903623374f29d92ea5ed65551f606f0f28261b0c773160fa009d43e3e4c62b2e15d7dd43ae54a2fc8708d24cd2cec9eafd69141a0d872f64aa03724b18354a914bd8d51cf2314d7c24f43319ab481ca16f49e70e77d74cbb51a80",
"0xf90211a048fdea9831823100a1666ec3f96d5a00d2f37a8a868a747e68021d88670290eca04646b8016c0e7af43ad1c940b3a5712f1c6ac58fbda60bde92b6016e931177f3a0b6b0831b8cf22b5174aa27fd4c544bcc0871bb168608d295cbd9b7f8d1ca079ba02801d39deb1c357535be4d33faaf8de67427e9811618fe479a24e76fe4811d12a0a8cf036b716b82b1fa92913d96d9c6a241b50b38bc8cae5ab9372b7a9fa3dfcea0377aba70aacf837b1e564f4b43e46176efcdd903f4cec79a537638513afaafe0a06e0d9903116d936bc8f961885433c183d1d4bc3508a60289cf05ad1b60f7f5e6a077875df7cc2ad0f68179dde7e0f6368febd10910ed53d21d00e0fcc507371342a07c511ff39ef9256b52e15888b132139226e1b2e224ae23e38df76b4d06a95544a0ca56d3fec4ca10d7cf0a5c0aabab6817e035e9659b7778d9a88a5dae4722db12a08f8c8c721d135838e16dbd50c4712841dd470088c244f509752bcff920253364a082ece15a206f20af75cf7242bc86e1bed19f54b19b6033d38b841bd896fd1933a0680e28a5de96750595091a9dd272aa9ff14ba945e146d179325267b7bf6f6508a081fe9eb2ce69e888a70a56d72568eb0e4891af7f20d4dd8f612280d0ca23cf40a0fa411cb7b0ec0a8f66bbd8253482161c7d01afed481a2777bf24793f36f71b22a027005997273df6d06aea5ad7de32f03713b0b482e805f9fcd83afd8c59d8848580",
"0xf90211a02747d543c8a3410efd989fa588c47d2507c9861bdec29574d4854653f6fc3ea6a072d4a756f1068e020f8f92bba67e0138bca21f12f6aca57dfdc2cc8a3d5e879aa063943cd42f578736b6298145ec83caa1d754a1088a188fae7cf4f47c61db4b10a08f71f34254ecc5537cc20a2f4a0f2af820a1dcca58dd3591e734eb53def4e67fa09f3f6123a8eb258afed0dd8975dab70f18e272e891a991c8df9822c8d0db20e5a089dcf67f4dad3b2d9cdb2bae92e6c00ec513e550f676b5810c25a98965307f02a0309d0894cf155124874ae733160c8dde21ea7edb97dd73e1bab37975933c4e83a0849e9985e2a24b8dc933ea58a6fde011c3860a1b246d372d47171cf54e1b2cf0a08f4d56cdabf06696c2ecf01a18c1df56897bc917e87d5372789d579801db445ea0f51d5ce538ee11f8d47b3734341b21d10503ce1252eab6d19cb3b153c5ff6f8ba0ede4a747adc81cea257ae5e7e9b3cbb32f5e10d146c78e9220ace7971ee147f4a0e20ab73cad6db6a50ef9db3905cfb48e332641f68e6cad00c7b4355c6bd20b7aa0c592be83fa9438fc7badb7cac1d852a54b0c730919ecb61ea39453f6958c1260a08bfccb533b082b21c4f3c0309eaca036037bd92ccfccfab95a1e14090862c8d1a091eb455f1fab1864a4fafc77d01887549131084fd8e9256fe506b556d873d4c3a0e442ec67b6d98eb50d369580dadcfe8e824f9cfa4f029269d2838137b3c614c080",
"0xf87180808080a08b5c90a6d4b6e4e89f1c192579185789fc3ffc6f8a17dac49c3053865934fca0808080a006c198dc3c6142b298108fee66b5688269b86a4a30429c43e53355699a765dda8080a007c901ff1e048505f63dbfb895b6a85403ff1b921066e12bfbc3878f803e909c8080808080",
],
},
],
};
const storageKeys = ["0xa934b07068f5d95a11413ed6d08a4a1122dc4b8c14a6ab2d94f8b279dac63042"];
await expect(
isValidStorageKeys({
proof,
storageKeys,
})
).eventually.to.be.true;
});
it("should fail with error for a wrong proof", async () => {
const proof: ELStorageProof = {
storageHash: "0x1b6a3261a5285e7d20f74f6f86134c894d395e39e15993eb4fad87a40f6af5e4",
storageProof: [
{
key: "0xa934b07068f5d95a11413ed6d08a4a1122dc4b8c14a6ab2d94f8b279dac63042",
value: "0x0",
proof: [
"0xe90211a035f929f33e4ff13e3c7a0f0cc1df0fd87ed58ac8a819091a0a1c05bb2f0cfc52a0d0ec64fd9c5883b3df24861cca332e437968ee38d98ec88670d09a235e94fd7aa0fcc64807aa553ac5e7e7ed716f5fb30b2af33a5e3e91ca60d7a5f2aa0dd6b0eca0edfd8d0f1559f25e5005ab9c2587f747a20e921145dc2cb276f717580ef49218a06066949aba34a1cdbc27e2a2de9f2673dd67a97729cadb8fd9e0be6bb8505e6da00023406388cbba918bd64aa14d6d1550f457aaaf212975d9ffa4a619363d8706a05b310a0ebf7270ef604b1f1c10f8e27dc1fc51b535715e3215f1ee04b47fe71ba01d1c1727fb02071b7350e1705cfcd88936d0465e4e075724e10cc6b0f1b18ef9a045e7acbb3d9f3a9886bad2befcebbc92ea7e338565ac76be3768e8da5bdbbf58a0d76ea3d82a08c5b565c7d6dffd7b9a8559006e8af04a264e8ae9c79ce729c3a7a0f1a8ba3190c4636f9570501509db1d209d8ac650b070a6a2ee973339b53efacca0e7c5ad879837168fd39cb45c986bffd8d3c99e487685213fef9fc54d06770e04a00556a20c4e9a24f2467ac80045c5d4c0671a2e79f9d475f12317119267b560f3a0c159b311bef30ab466930a3694c6ccc9ecf728332fb039aec3e49795f0469befa0ec31caaaa48bba20bbcf1c5b953ec47b491cd5c6903640e4102ce54613db19baa09e76eadd4788600b5b37b9dfb2e355802ac75ce65de5fb87e4d930974e5d3e2180",
"0xf90211a0a7af35500f30484963b3168c62d7a86299e02b5cd0b698a541fc75a637611a80a0eaa57f45d3864fd735614b8c3de99875e8537e3d957b8647cbadcfe503965128a07caa21a62ef79a3cb87cb922cc267d5675d2dfed0487edc35d888846f63cf5e8a05179f38e0b309df3cca585eccb07a2df7d158eecb1dcde8792db7d99af6658e4a0fcf8de500c8c8659a762438d35c8a24e535a4f6156ef55095f2c9682eda15115a0542184034e298de70cad346481b45a38b8c4a68b01d864d3bc753c37cad8922ba0efb0e25dce2f783c3ed699971ffed4397250c1f8947c93902215e0c4d4284812a016a20023cf42f458b9207aaf3864266953dde6718f07837104ab19231c331af6a08ac356f497d61d607c566be1c85bd331b7a192d4bf4fd579b3b08b97e69b0c90a004e200625b0f83e93b8225f044090fc899957964dcccda7a62b54e1a8d5e6a79a0bdaa3c3b540ceb4faefd4415196fb9f997cbf7f6c0e3e5c4c0cb881dcbdbfb5ba0124e452071958a977fd7606712ed22f0b814f8d0e46f6e161a74d449c328bafaa077caef292e5b69ba5dd1779ac56b58c6a0410ae0bc3463ca964c281b99ae624ba05b2cc97bc76ec70df7de2d470a36d299dda926c64d6886e574911e8f7c8b2b42a0626fbd444bbee61a5bd65703d9f3b60a17faeedc48e74308f76130142b3b5471a0f9c47f1313d03542e8e845d0f1f03a84d5bf1bd8ec209b73b0570fff7764ce3b80",
"0xf90211a0003d99fbd5ed6a3c36b3a94a7676f94e7320eb29f664d66350cef4e87e71d0a6a0e40b6e4d13bc2e79a1842c0459506e8d1eaf23c2abe6da8b652e9ac79887819ba082e41d8c0a86d81f229743c97ba23d0cfdcb348dc410a4d199c22b43954dc061a0ade1a10055a1b8dc66eb1d0b3f66533440f003a4b30435090cc737ccda7077c3a0a0391531281ea5fcfad3ac057627c3e5a7b26d8178fa3290db3d4bfeed426b17a0a18d4ea1516b9ce01b66a926e03cd545faef205594a9ab09ff181e4062900425a0aa0f007dc836777093c7a156c0a897b14b0cf31bbec89aea65df86f67e2a09c2a03973162b1b6133f91d10a420fb38d339f51f5b8e650d43a9644f82c17747b6f7a0429b1a67530253c5ef6d90799cefceea90d7623e7dc666d0611364feec413d40a0f5f898635d366ab1e1cc762044dc128879632b3463859a3b25e83796cf1c9bcda060ae20f12695ddcf0dcbf8c197314960faaf2b8a4a5d2ecb9a2c449d9dccd880a032b52c3864ece34069e471bd44057e066c80696db9755c1df264ce86c6aa9ee1a032b7170ee13afc61011dfad9d0df2c07dea31a3c316f6af67b94f00f044e1c7fa0ca5e38fbc4de6dad4903623374f29d92ea5ed65551f606f0f28261b0c773160fa009d43e3e4c62b2e15d7dd43ae54a2fc8708d24cd2cec9eafd69141a0d872f64aa03724b18354a914bd8d51cf2314d7c24f43319ab481ca16f49e70e77d74cbb51a80",
"0xf90211a048fdea9831823100a1666ec3f96d5a00d2f37a8a868a747e68021d88670290eca04646b8016c0e7af43ad1c940b3a5712f1c6ac58fbda60bde92b6016e931177f3a0b6b0831b8cf22b5174aa27fd4c544bcc0871bb168608d295cbd9b7f8d1ca079ba02801d39deb1c357535be4d33faaf8de67427e9811618fe479a24e76fe4811d12a0a8cf036b716b82b1fa92913d96d9c6a241b50b38bc8cae5ab9372b7a9fa3dfcea0377aba70aacf837b1e564f4b43e46176efcdd903f4cec79a537638513afaafe0a06e0d9903116d936bc8f961885433c183d1d4bc3508a60289cf05ad1b60f7f5e6a077875df7cc2ad0f68179dde7e0f6368febd10910ed53d21d00e0fcc507371342a07c511ff39ef9256b52e15888b132139226e1b2e224ae23e38df76b4d06a95544a0ca56d3fec4ca10d7cf0a5c0aabab6817e035e9659b7778d9a88a5dae4722db12a08f8c8c721d135838e16dbd50c4712841dd470088c244f509752bcff920253364a082ece15a206f20af75cf7242bc86e1bed19f54b19b6033d38b841bd896fd1933a0680e28a5de96750595091a9dd272aa9ff14ba945e146d179325267b7bf6f6508a081fe9eb2ce69e888a70a56d72568eb0e4891af7f20d4dd8f612280d0ca23cf40a0fa411cb7b0ec0a8f66bbd8253482161c7d01afed481a2777bf24793f36f71b22a027005997273df6d06aea5ad7de32f03713b0b482e805f9fcd83afd8c59d8848580",
"0xf90211a02747d543c8a3410efd989fa588c47d2507c9861bdec29574d4854653f6fc3ea6a072d4a756f1068e020f8f92bba67e0138bca21f12f6aca57dfdc2cc8a3d5e879aa063943cd42f578736b6298145ec83caa1d754a1088a188fae7cf4f47c61db4b10a08f71f34254ecc5537cc20a2f4a0f2af820a1dcca58dd3591e734eb53def4e67fa09f3f6123a8eb258afed0dd8975dab70f18e272e891a991c8df9822c8d0db20e5a089dcf67f4dad3b2d9cdb2bae92e6c00ec513e550f676b5810c25a98965307f02a0309d0894cf155124874ae733160c8dde21ea7edb97dd73e1bab37975933c4e83a0849e9985e2a24b8dc933ea58a6fde011c3860a1b246d372d47171cf54e1b2cf0a08f4d56cdabf06696c2ecf01a18c1df56897bc917e87d5372789d579801db445ea0f51d5ce538ee11f8d47b3734341b21d10503ce1252eab6d19cb3b153c5ff6f8ba0ede4a747adc81cea257ae5e7e9b3cbb32f5e10d146c78e9220ace7971ee147f4a0e20ab73cad6db6a50ef9db3905cfb48e332641f68e6cad00c7b4355c6bd20b7aa0c592be83fa9438fc7badb7cac1d852a54b0c730919ecb61ea39453f6958c1260a08bfccb533b082b21c4f3c0309eaca036037bd92ccfccfab95a1e14090862c8d1a091eb455f1fab1864a4fafc77d01887549131084fd8e9256fe506b556d873d4c3a0e442ec67b6d98eb50d369580dadcfe8e824f9cfa4f029269d2838137b3c614c080",
"0xf87180808080a08b5c90a6d4b6e4e89f1c192579185789fc3ffc6f8a17dac49c3053865934fca0808080a006c198dc3c6142b298108fee66b5688269b86a4a30429c43e53355699a765dda8080a007c901ff1e048505f63dbfb895b6a85403ff1b921066e12bfbc3878f803e909c8080808080",
],
},
],
};
const storageKeys = ["0xa934b07068f5d95a11413ed6d08a4a1122dc4b8c14a6ab2d94f8b279dac63042"];
await expect(
isValidStorageKeys({
proof,
storageKeys,
})
).rejectedWith("Invalid proof provided");
});
it("should fail with error for a non existance key", async () => {
const proof: ELStorageProof = {
storageHash: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
storageProof: [
{
key: "0xf97e180c050e5ab072211ad2c213eb5aee4df134",
value: "0x0",
proof: [],
},
],
};
const storageKeys = ["0xa934b07068f5d95a11413ed6d08a4a1122dc4b8c14a6ab2d94f8b279dac63042"];
await expect(
isValidStorageKeys({
proof,
storageKeys,
})
).rejectedWith("Invalid proof provided");
});
it("should return true empty keys", async () => {
const proof: ELStorageProof = {
storageHash: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
storageProof: [],
};
const storageKeys: string[] = [];
await expect(
isValidStorageKeys({
proof,
storageKeys,
})
).eventually.to.be.true;
});
});
});

View File

@@ -0,0 +1,42 @@
import {expect} from "chai";
import Web3 from "web3";
import {ethers} from "ethers";
import {LCTransport} from "../../src/interfaces.js";
import {ProofProvider} from "../../src/proof_provider/proof_provider.js";
import {createVerifiedExecutionProvider} from "../../src/web3_provider.js";
describe("web3_provider", () => {
describe("createVerifiedExecutionProvider", () => {
describe("web3", () => {
it("should create a verified execution provider for the web3 provider", () => {
const {provider, proofProvider} = createVerifiedExecutionProvider(
new Web3.providers.HttpProvider("https://lodestar-sepoliarpc.chainsafe.io"),
{
transport: LCTransport.Rest,
urls: ["https://lodestar-sepolia.chainsafe.io"],
network: "sepolia",
}
);
expect(provider).be.instanceof(Web3.providers.HttpProvider);
expect(proofProvider).be.instanceOf(ProofProvider);
});
});
describe("ethers", () => {
it("should create a verified execution provider for the ethers provider", () => {
const {provider, proofProvider} = createVerifiedExecutionProvider(
new ethers.JsonRpcProvider("https://lodestar-sepoliarpc.chainsafe.io"),
{
transport: LCTransport.Rest,
urls: ["https://lodestar-sepolia.chainsafe.io"],
network: "sepolia",
}
);
expect(provider).be.instanceof(ethers.JsonRpcProvider);
expect(proofProvider).be.instanceOf(ProofProvider);
});
});
});
});

View File

@@ -0,0 +1,9 @@
{
"extends": "../../tsconfig.build.json",
"include": ["src"],
"compilerOptions": {
"outDir": "lib",
// Added because compilation failed with the error `Duplicate identifier 'Buffer'`
"skipLibCheck": true
}
}

View File

@@ -0,0 +1,12 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
// To fix error "Duplicate identifier 'Buffer'"
// This can save time during compilation at the expense of type-system accuracy.
// For example, two libraries could define two copies of the same type in an inconsistent way.
// Rather than doing a full check of all d.ts files, TypeScript will type check
// the code you specifically refer to in your app's source code.
// https://www.typescriptlang.org/tsconfig#skipLibCheck
"skipLibCheck": true
}
}

View File

@@ -0,0 +1,5 @@
const webpackConfig = require("../../webpack.test.config.js");
module.exports = {
...webpackConfig,
};

1582
yarn.lock

File diff suppressed because it is too large Load Diff