From 62d3e49f2888458366d3cbc9c9b84964c639ecb0 Mon Sep 17 00:00:00 2001 From: philknows Date: Wed, 10 Dec 2025 11:44:00 -0500 Subject: [PATCH 01/20] chore: bump package versions to 1.38.0 --- lerna.json | 2 +- packages/api/package.json | 10 ++++----- packages/beacon-node/package.json | 28 +++++++++++++------------- packages/cli/package.json | 26 ++++++++++++------------ packages/config/package.json | 8 ++++---- packages/db/package.json | 8 ++++---- packages/era/package.json | 10 ++++----- packages/flare/package.json | 14 ++++++------- packages/fork-choice/package.json | 12 +++++------ packages/light-client/package.json | 12 +++++------ packages/logger/package.json | 6 +++--- packages/params/package.json | 2 +- packages/prover/package.json | 18 ++++++++--------- packages/reqresp/package.json | 12 +++++------ packages/spec-test-util/package.json | 4 ++-- packages/state-transition/package.json | 10 ++++----- packages/test-utils/package.json | 6 +++--- packages/types/package.json | 4 ++-- packages/utils/package.json | 2 +- packages/validator/package.json | 18 ++++++++--------- 20 files changed, 106 insertions(+), 106 deletions(-) diff --git a/lerna.json b/lerna.json index 8530a4537c..2b9ef31721 100644 --- a/lerna.json +++ b/lerna.json @@ -3,7 +3,7 @@ "packages/*" ], "npmClient": "yarn", - "version": "1.37.0", + "version": "1.38.0", "stream": true, "command": { "version": { diff --git a/packages/api/package.json b/packages/api/package.json index b92aef9f27..b997aef9c5 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { @@ -76,10 +76,10 @@ "dependencies": { "@chainsafe/persistent-merkle-tree": "^1.2.1", "@chainsafe/ssz": "^1.2.2", - "@lodestar/config": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/types": "^1.37.0", - "@lodestar/utils": "^1.37.0", + "@lodestar/config": "^1.38.0", + "@lodestar/params": "^1.38.0", + "@lodestar/types": "^1.38.0", + "@lodestar/utils": "^1.38.0", "eventsource": "^2.0.2", "qs": "^6.11.1" }, diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index 3c92b38b19..867a50a2b9 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { @@ -123,6 +123,7 @@ "@chainsafe/persistent-merkle-tree": "^1.2.1", "@chainsafe/prometheus-gc-stats": "^1.0.0", "@chainsafe/pubkey-index-map": "^3.0.0", + "@chainsafe/snappy-wasm": "^0.5.0", "@chainsafe/ssz": "^1.2.2", "@chainsafe/threads": "^1.11.3", "@crate-crypto/node-eth-kzg": "0.9.1", @@ -140,18 +141,18 @@ "@libp2p/peer-id": "^5.1.0", "@libp2p/prometheus-metrics": "^4.3.15", "@libp2p/tcp": "^10.1.8", - "@lodestar/api": "^1.37.0", - "@lodestar/config": "^1.37.0", - "@lodestar/db": "^1.37.0", - "@lodestar/fork-choice": "^1.37.0", - "@lodestar/light-client": "^1.37.0", - "@lodestar/logger": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/reqresp": "^1.37.0", - "@lodestar/state-transition": "^1.37.0", - "@lodestar/types": "^1.37.0", - "@lodestar/utils": "^1.37.0", - "@lodestar/validator": "^1.37.0", + "@lodestar/api": "^1.38.0", + "@lodestar/config": "^1.38.0", + "@lodestar/db": "^1.38.0", + "@lodestar/fork-choice": "^1.38.0", + "@lodestar/light-client": "^1.38.0", + "@lodestar/logger": "^1.38.0", + 
"@lodestar/params": "^1.38.0", + "@lodestar/reqresp": "^1.38.0", + "@lodestar/state-transition": "^1.38.0", + "@lodestar/types": "^1.38.0", + "@lodestar/utils": "^1.38.0", + "@lodestar/validator": "^1.38.0", "@multiformats/multiaddr": "^12.1.3", "datastore-core": "^10.0.2", "datastore-fs": "^10.0.6", @@ -166,7 +167,6 @@ "multiformats": "^11.0.1", "prom-client": "^15.1.0", "qs": "^6.11.1", - "@chainsafe/snappy-wasm": "^0.5.0", "strict-event-emitter-types": "^2.0.0", "systeminformation": "^5.22.9", "uint8arraylist": "^2.4.7", diff --git a/packages/cli/package.json b/packages/cli/package.json index 96f26d5a93..f8c1d77c3f 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@chainsafe/lodestar", - "version": "1.37.0", + "version": "1.38.0", "description": "Command line interface for lodestar", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -69,17 +69,17 @@ "@libp2p/crypto": "^5.0.15", "@libp2p/interface": "^2.7.0", "@libp2p/peer-id": "^5.1.0", - "@lodestar/api": "^1.37.0", - "@lodestar/beacon-node": "^1.37.0", - "@lodestar/config": "^1.37.0", - "@lodestar/db": "^1.37.0", - "@lodestar/light-client": "^1.37.0", - "@lodestar/logger": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/state-transition": "^1.37.0", - "@lodestar/types": "^1.37.0", - "@lodestar/utils": "^1.37.0", - "@lodestar/validator": "^1.37.0", + "@lodestar/api": "^1.38.0", + "@lodestar/beacon-node": "^1.38.0", + "@lodestar/config": "^1.38.0", + "@lodestar/db": "^1.38.0", + "@lodestar/light-client": "^1.38.0", + "@lodestar/logger": "^1.38.0", + "@lodestar/params": "^1.38.0", + "@lodestar/state-transition": "^1.38.0", + "@lodestar/types": "^1.38.0", + "@lodestar/utils": "^1.38.0", + "@lodestar/validator": "^1.38.0", "@multiformats/multiaddr": "^12.1.3", "deepmerge": "^4.3.1", "ethers": "^6.7.0", @@ -94,7 +94,7 @@ "yargs": "^17.7.1" }, "devDependencies": { - "@lodestar/test-utils": "^1.37.0", + "@lodestar/test-utils": "^1.38.0", "@types/debug": "^4.1.7", "@types/inquirer": "^9.0.3", "@types/proper-lockfile": "^4.1.4", diff --git a/packages/config/package.json b/packages/config/package.json index e555d33c2b..b58a512a07 100644 --- a/packages/config/package.json +++ b/packages/config/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/config", - "version": "1.37.0", + "version": "1.38.0", "description": "Chain configuration required for lodestar", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -61,8 +61,8 @@ ], "dependencies": { "@chainsafe/ssz": "^1.2.2", - "@lodestar/params": "^1.37.0", - "@lodestar/types": "^1.37.0", - "@lodestar/utils": "^1.37.0" + "@lodestar/params": "^1.38.0", + "@lodestar/types": "^1.38.0", + "@lodestar/utils": "^1.38.0" } } diff --git a/packages/db/package.json b/packages/db/package.json index 489526b795..2ff9bc1d6f 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/db", - "version": "1.37.0", + "version": "1.38.0", "description": "DB modules of Lodestar", "author": "ChainSafe Systems", "homepage": "https://github.com/ChainSafe/lodestar#readme", @@ -44,12 +44,12 @@ }, "dependencies": { "@chainsafe/ssz": "^1.2.2", - "@lodestar/config": "^1.37.0", - "@lodestar/utils": "^1.37.0", + "@lodestar/config": "^1.38.0", + "@lodestar/utils": "^1.38.0", "classic-level": "^1.4.1", "it-all": "^3.0.4" }, "devDependencies": { - "@lodestar/logger": "^1.37.0" + "@lodestar/logger": "^1.38.0" } } diff --git a/packages/era/package.json b/packages/era/package.json index d7fec90008..6ba09999e5 100644 --- 
a/packages/era/package.json +++ b/packages/era/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { @@ -40,10 +40,10 @@ }, "dependencies": { "@chainsafe/blst": "^2.2.0", - "@lodestar/config": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/reqresp": "^1.37.0", - "@lodestar/types": "^1.37.0", + "@lodestar/config": "^1.38.0", + "@lodestar/params": "^1.38.0", + "@lodestar/reqresp": "^1.38.0", + "@lodestar/types": "^1.38.0", "uint8arraylist": "^2.4.7" } } diff --git a/packages/flare/package.json b/packages/flare/package.json index 2f2b83f8c7..2332b45aa9 100644 --- a/packages/flare/package.json +++ b/packages/flare/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/flare", - "version": "1.37.0", + "version": "1.38.0", "description": "Beacon chain debugging tool", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -50,12 +50,12 @@ "dependencies": { "@chainsafe/bls-keygen": "^0.4.0", "@chainsafe/blst": "^2.2.0", - "@lodestar/api": "^1.37.0", - "@lodestar/config": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/state-transition": "^1.37.0", - "@lodestar/types": "^1.37.0", - "@lodestar/utils": "^1.37.0", + "@lodestar/api": "^1.38.0", + "@lodestar/config": "^1.38.0", + "@lodestar/params": "^1.38.0", + "@lodestar/state-transition": "^1.38.0", + "@lodestar/types": "^1.38.0", + "@lodestar/utils": "^1.38.0", "source-map-support": "^0.5.21", "yargs": "^17.7.1" }, diff --git a/packages/fork-choice/package.json b/packages/fork-choice/package.json index afadc027c6..89a9aafb55 100644 --- a/packages/fork-choice/package.json +++ b/packages/fork-choice/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { @@ -40,11 +40,11 @@ }, "dependencies": { "@chainsafe/ssz": "^1.2.2", - "@lodestar/config": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/state-transition": "^1.37.0", - "@lodestar/types": "^1.37.0", - "@lodestar/utils": "^1.37.0" + "@lodestar/config": "^1.38.0", + "@lodestar/params": "^1.38.0", + "@lodestar/state-transition": "^1.38.0", + "@lodestar/types": "^1.38.0", + "@lodestar/utils": "^1.38.0" }, "keywords": [ "ethereum", diff --git a/packages/light-client/package.json b/packages/light-client/package.json index 388235163c..72a75e3c42 100644 --- a/packages/light-client/package.json +++ b/packages/light-client/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { @@ -67,11 +67,11 @@ "@chainsafe/blst": "^2.2.0", "@chainsafe/persistent-merkle-tree": "^1.2.1", "@chainsafe/ssz": "^1.2.2", - "@lodestar/api": "^1.37.0", - "@lodestar/config": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/types": "^1.37.0", - "@lodestar/utils": "^1.37.0", + "@lodestar/api": "^1.38.0", + "@lodestar/config": "^1.38.0", + "@lodestar/params": "^1.38.0", + "@lodestar/types": "^1.38.0", + "@lodestar/utils": "^1.38.0", "mitt": "^3.0.0" }, "devDependencies": { diff --git a/packages/logger/package.json b/packages/logger/package.json index d3e283baab..72b9987b56 100644 --- a/packages/logger/package.json +++ b/packages/logger/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { @@ -62,14 
+62,14 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@lodestar/utils": "^1.37.0", + "@lodestar/utils": "^1.38.0", "winston": "^3.8.2", "winston-daily-rotate-file": "^4.7.1", "winston-transport": "^4.5.0" }, "devDependencies": { "@chainsafe/threads": "^1.11.3", - "@lodestar/test-utils": "^1.37.0", + "@lodestar/test-utils": "^1.38.0", "@types/triple-beam": "^1.3.2", "triple-beam": "^1.3.0" }, diff --git a/packages/params/package.json b/packages/params/package.json index 7ac42fec90..19276a71db 100644 --- a/packages/params/package.json +++ b/packages/params/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/params", - "version": "1.37.0", + "version": "1.38.0", "description": "Chain parameters required for lodestar", "author": "ChainSafe Systems", "license": "Apache-2.0", diff --git a/packages/prover/package.json b/packages/prover/package.json index 7b5ed575ba..a1cd5760a2 100644 --- a/packages/prover/package.json +++ b/packages/prover/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { @@ -59,13 +59,13 @@ "@ethereumjs/tx": "^4.1.2", "@ethereumjs/util": "^8.0.6", "@ethereumjs/vm": "^6.4.2", - "@lodestar/api": "^1.37.0", - "@lodestar/config": "^1.37.0", - "@lodestar/light-client": "^1.37.0", - "@lodestar/logger": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/types": "^1.37.0", - "@lodestar/utils": "^1.37.0", + "@lodestar/api": "^1.38.0", + "@lodestar/config": "^1.38.0", + "@lodestar/light-client": "^1.38.0", + "@lodestar/logger": "^1.38.0", + "@lodestar/params": "^1.38.0", + "@lodestar/types": "^1.38.0", + "@lodestar/utils": "^1.38.0", "ethereum-cryptography": "^2.0.0", "find-up": "^6.3.0", "http-proxy": "^1.18.1", @@ -74,7 +74,7 @@ "yargs": "^17.7.1" }, "devDependencies": { - "@lodestar/test-utils": "^1.37.0", + "@lodestar/test-utils": "^1.38.0", "@types/http-proxy": "^1.17.10", "@types/yargs": "^17.0.24", "axios": "^1.3.4", diff --git a/packages/reqresp/package.json b/packages/reqresp/package.json index 847095f669..8b7c5c3e2f 100644 --- a/packages/reqresp/package.json +++ b/packages/reqresp/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { @@ -46,9 +46,9 @@ "dependencies": { "@chainsafe/fast-crc32c": "^4.2.0", "@libp2p/interface": "^2.7.0", - "@lodestar/config": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/utils": "^1.37.0", + "@lodestar/config": "^1.38.0", + "@lodestar/params": "^1.38.0", + "@lodestar/utils": "^1.38.0", "it-all": "^3.0.4", "it-pipe": "^3.0.1", "snappy": "^7.2.2", @@ -57,8 +57,8 @@ "uint8arraylist": "^2.4.7" }, "devDependencies": { - "@lodestar/logger": "^1.37.0", - "@lodestar/types": "^1.37.0", + "@lodestar/logger": "^1.38.0", + "@lodestar/types": "^1.38.0", "libp2p": "2.9.0" }, "peerDependencies": { diff --git a/packages/spec-test-util/package.json b/packages/spec-test-util/package.json index 0441d48f9a..86f510e7ae 100644 --- a/packages/spec-test-util/package.json +++ b/packages/spec-test-util/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/spec-test-util", - "version": "1.37.0", + "version": "1.38.0", "description": "Spec test suite generator from yaml test files", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -54,7 +54,7 @@ "blockchain" ], "dependencies": { - "@lodestar/utils": "^1.37.0", + "@lodestar/utils": "^1.38.0", "rimraf": "^4.4.1", "snappyjs": "^0.7.0", "vitest": 
"^4.0.7" diff --git a/packages/state-transition/package.json b/packages/state-transition/package.json index b828ae4b1d..643bbf7488 100644 --- a/packages/state-transition/package.json +++ b/packages/state-transition/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { @@ -62,10 +62,10 @@ "@chainsafe/pubkey-index-map": "^3.0.0", "@chainsafe/ssz": "^1.2.2", "@chainsafe/swap-or-not-shuffle": "^1.2.1", - "@lodestar/config": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/types": "^1.37.0", - "@lodestar/utils": "^1.37.0", + "@lodestar/config": "^1.38.0", + "@lodestar/params": "^1.38.0", + "@lodestar/types": "^1.38.0", + "@lodestar/utils": "^1.38.0", "bigint-buffer": "^1.1.5" }, "keywords": [ diff --git a/packages/test-utils/package.json b/packages/test-utils/package.json index bdfd58f91d..48e6846ce0 100644 --- a/packages/test-utils/package.json +++ b/packages/test-utils/package.json @@ -1,7 +1,7 @@ { "name": "@lodestar/test-utils", "private": true, - "version": "1.37.0", + "version": "1.38.0", "description": "Test utilities reused across other packages", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -52,8 +52,8 @@ "dependencies": { "@chainsafe/bls-keystore": "^3.1.0", "@chainsafe/blst": "^2.2.0", - "@lodestar/params": "^1.37.0", - "@lodestar/utils": "^1.37.0", + "@lodestar/params": "^1.38.0", + "@lodestar/utils": "^1.38.0", "axios": "^1.3.4", "tmp": "^0.2.1", "vitest": "^4.0.7" diff --git a/packages/types/package.json b/packages/types/package.json index 5686cd582d..405331ffbf 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { @@ -77,7 +77,7 @@ "types": "lib/index.d.ts", "dependencies": { "@chainsafe/ssz": "^1.2.2", - "@lodestar/params": "^1.37.0", + "@lodestar/params": "^1.38.0", "ethereum-cryptography": "^2.0.0" }, "keywords": [ diff --git a/packages/utils/package.json b/packages/utils/package.json index 2e1919d0ee..869758ff8d 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.37.0", + "version": "1.38.0", "type": "module", "exports": { ".": { diff --git a/packages/validator/package.json b/packages/validator/package.json index 3a6a156a93..d648bf3ef1 100644 --- a/packages/validator/package.json +++ b/packages/validator/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/validator", - "version": "1.37.0", + "version": "1.38.0", "description": "A Typescript implementation of the validator client", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -50,17 +50,17 @@ "dependencies": { "@chainsafe/blst": "^2.2.0", "@chainsafe/ssz": "^1.2.2", - "@lodestar/api": "^1.37.0", - "@lodestar/config": "^1.37.0", - "@lodestar/db": "^1.37.0", - "@lodestar/params": "^1.37.0", - "@lodestar/state-transition": "^1.37.0", - "@lodestar/types": "^1.37.0", - "@lodestar/utils": "^1.37.0", + "@lodestar/api": "^1.38.0", + "@lodestar/config": "^1.38.0", + "@lodestar/db": "^1.38.0", + "@lodestar/params": "^1.38.0", + "@lodestar/state-transition": "^1.38.0", + "@lodestar/types": "^1.38.0", + "@lodestar/utils": "^1.38.0", "strict-event-emitter-types": "^2.0.0" }, "devDependencies": { - "@lodestar/test-utils": "^1.37.0", + "@lodestar/test-utils": "^1.38.0", 
"bigint-buffer": "^1.1.5", "rimraf": "^4.4.1" } From 1ddbe5d8704f0c44784212b7231bbc55b1fdee85 Mon Sep 17 00:00:00 2001 From: twoeths <10568965+twoeths@users.noreply.github.com> Date: Fri, 12 Dec 2025 02:19:08 +0700 Subject: [PATCH 02/20] chore: use index2pubkey of BeaconChain (#8674) **Motivation** - once we have `state-transition-z`, we're not able to get `index2pubkey` from a light view of BeaconState in beacon-node **Description** - in `beacon-node`, use `index2pubkey` of BeaconChain instead as a preparation for working with `state-transition-z` - it's ok to use `state.epochCtx.index2pubkey` in `state-transition` since it can access the full state there part of #8652 --------- Co-authored-by: Tuyen Nguyen --- .../src/chain/blocks/verifyBlock.ts | 1 + .../chain/blocks/verifyBlocksSignatures.ts | 5 +++-- packages/beacon-node/src/chain/chain.ts | 4 ++-- .../src/chain/rewards/syncCommitteeRewards.ts | 4 ++-- .../src/chain/validation/attesterSlashing.ts | 4 ++-- .../src/chain/validation/blobSidecar.ts | 12 +++++++++-- .../beacon-node/src/chain/validation/block.ts | 2 +- .../src/chain/validation/dataColumnSidecar.ts | 7 ++++++- .../src/chain/validation/proposerSlashing.ts | 2 +- .../signatureSets/contributionAndProof.ts | 5 +++-- .../validation/signatureSets/syncCommittee.ts | 4 +++- .../syncCommitteeSelectionProof.ts | 6 ++++-- .../src/chain/validation/syncCommittee.ts | 2 +- .../syncCommitteeContributionAndProof.ts | 9 ++++----- .../src/chain/validation/voluntaryExit.ts | 2 +- .../beacon-node/src/sync/backfill/backfill.ts | 11 ++++++++-- .../beacon-node/src/sync/backfill/verify.ts | 10 ++++++++-- packages/beacon-node/test/fixtures/capella.ts | 5 +++-- .../test/perf/chain/opPools/opPool.test.ts | 17 +++++++++++----- .../src/block/isValidIndexedAttestation.ts | 7 +++++-- .../src/block/processAttestationPhase0.ts | 9 ++++++++- .../src/block/processAttestationsAltair.ts | 2 +- .../src/block/processAttesterSlashing.ts | 9 ++++++--- .../src/block/processProposerSlashing.ts | 2 +- .../src/block/processRandao.ts | 2 +- .../src/block/processSyncCommittee.ts | 7 ++++--- .../src/block/processVoluntaryExit.ts | 2 +- .../src/signatureSets/attesterSlashings.ts | 10 +++++++--- .../src/signatureSets/index.ts | 17 +++++++++------- .../src/signatureSets/indexedAttestation.ts | 17 +++++++++++++--- .../src/signatureSets/proposer.ts | 20 ++++++++++++------- .../src/signatureSets/proposerSlashings.ts | 8 +++++--- .../src/signatureSets/randao.ts | 18 ++++++++++++----- .../src/signatureSets/voluntaryExits.ts | 11 ++++++---- .../state-transition/src/stateTransition.ts | 2 +- .../block/isValidIndexedAttestation.test.ts | 4 +++- .../unit/signatureSets/signatureSets.test.ts | 2 +- 37 files changed, 177 insertions(+), 84 deletions(-) diff --git a/packages/beacon-node/src/chain/blocks/verifyBlock.ts b/packages/beacon-node/src/chain/blocks/verifyBlock.ts index 7ff8f3397a..b7582ab872 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlock.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlock.ts @@ -143,6 +143,7 @@ export async function verifyBlocksInEpoch( // All signatures at once opts.skipVerifyBlockSignatures !== true ? 
verifyBlocksSignatures( + this.index2pubkey, this.bls, this.logger, this.metrics, diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts index 09c58d8973..ac13688a75 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts @@ -1,4 +1,4 @@ -import {CachedBeaconStateAllForks, getBlockSignatureSets} from "@lodestar/state-transition"; +import {CachedBeaconStateAllForks, Index2PubkeyCache, getBlockSignatureSets} from "@lodestar/state-transition"; import {IndexedAttestation, SignedBeaconBlock} from "@lodestar/types"; import {Logger} from "@lodestar/utils"; import {Metrics} from "../../metrics/metrics.js"; @@ -15,6 +15,7 @@ import {ImportBlockOpts} from "./types.js"; * Since all data is known in advance all signatures are verified at once in parallel. */ export async function verifyBlocksSignatures( + index2pubkey: Index2PubkeyCache, bls: IBlsVerifier, logger: Logger, metrics: Metrics | null, @@ -38,7 +39,7 @@ export async function verifyBlocksSignatures( : // // Verify signatures per block to track which block is invalid bls.verifySignatureSets( - getBlockSignatureSets(preState0, block, indexedAttestationsByBlock[i], { + getBlockSignatureSets(index2pubkey, preState0, block, indexedAttestationsByBlock[i], { skipProposerSignature: opts.validProposerSignature, }) ); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 091739b7d3..33bbf59241 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -757,7 +757,7 @@ export class BeaconChain implements IBeaconChain { RegenCaller.produceBlock ); const proposerIndex = state.epochCtx.getBeaconProposer(slot); - const proposerPubKey = state.epochCtx.index2pubkey[proposerIndex].toBytes(); + const proposerPubKey = this.index2pubkey[proposerIndex].toBytes(); const {body, produceResult, executionPayloadValue, shouldOverrideBuilder} = await produceBlockBody.call( this, @@ -1372,6 +1372,6 @@ export class BeaconChain implements IBeaconChain { preState = processSlots(preState, block.slot); // Dial preState's slot to block.slot - return computeSyncCommitteeRewards(block, preState.clone(), validatorIds); + return computeSyncCommitteeRewards(this.index2pubkey, block, preState.clone(), validatorIds); } } diff --git a/packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts b/packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts index e5b6532903..de3638a71e 100644 --- a/packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts +++ b/packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts @@ -1,12 +1,13 @@ import {routes} from "@lodestar/api"; import {ForkName, SYNC_COMMITTEE_SIZE} from "@lodestar/params"; -import {CachedBeaconStateAllForks, CachedBeaconStateAltair} from "@lodestar/state-transition"; +import {CachedBeaconStateAllForks, CachedBeaconStateAltair, Index2PubkeyCache} from "@lodestar/state-transition"; import {BeaconBlock, ValidatorIndex, altair} from "@lodestar/types"; export type SyncCommitteeRewards = routes.beacon.SyncCommitteeRewards; type BalanceRecord = {val: number}; // Use val for convenient way to increment/decrement balance export async function computeSyncCommitteeRewards( + index2pubkey: Index2PubkeyCache, block: BeaconBlock, preState: CachedBeaconStateAllForks, validatorIds: (ValidatorIndex | string)[] = [] @@ -18,7 +19,6 @@ export async function 
computeSyncCommitteeRewards( const altairBlock = block as altair.BeaconBlock; const preStateAltair = preState as CachedBeaconStateAltair; - const {index2pubkey} = preStateAltair.epochCtx; // Bound syncCommitteeValidatorIndices in case it goes beyond SYNC_COMMITTEE_SIZE just to be safe const syncCommitteeValidatorIndices = preStateAltair.epochCtx.currentSyncCommitteeIndexed.validatorIndices.slice( diff --git a/packages/beacon-node/src/chain/validation/attesterSlashing.ts b/packages/beacon-node/src/chain/validation/attesterSlashing.ts index 2aaca763b0..a672604007 100644 --- a/packages/beacon-node/src/chain/validation/attesterSlashing.ts +++ b/packages/beacon-node/src/chain/validation/attesterSlashing.ts @@ -43,7 +43,7 @@ export async function validateAttesterSlashing( // [REJECT] All of the conditions within process_attester_slashing pass validation. try { // verifySignature = false, verified in batch below - assertValidAttesterSlashing(state, attesterSlashing, false); + assertValidAttesterSlashing(chain.index2pubkey, state, attesterSlashing, false); } catch (e) { throw new AttesterSlashingError(GossipAction.REJECT, { code: AttesterSlashingErrorCode.INVALID, @@ -51,7 +51,7 @@ export async function validateAttesterSlashing( }); } - const signatureSets = getAttesterSlashingSignatureSets(state, attesterSlashing); + const signatureSets = getAttesterSlashingSignatureSets(chain.index2pubkey, state, attesterSlashing); if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true, priority: prioritizeBls}))) { throw new AttesterSlashingError(GossipAction.REJECT, { code: AttesterSlashingErrorCode.INVALID, diff --git a/packages/beacon-node/src/chain/validation/blobSidecar.ts b/packages/beacon-node/src/chain/validation/blobSidecar.ts index d34e993a49..129dc77c69 100644 --- a/packages/beacon-node/src/chain/validation/blobSidecar.ts +++ b/packages/beacon-node/src/chain/validation/blobSidecar.ts @@ -137,7 +137,11 @@ export async function validateGossipBlobSidecar( // [REJECT] The proposer signature, signed_beacon_block.signature, is valid with respect to the proposer_index pubkey. 
const signature = blobSidecar.signedBlockHeader.signature; if (!chain.seenBlockInputCache.isVerifiedProposerSignature(blobSlot, blockHex, signature)) { - const signatureSet = getBlockHeaderProposerSignatureSetByParentStateSlot(blockState, blobSidecar.signedBlockHeader); + const signatureSet = getBlockHeaderProposerSignatureSetByParentStateSlot( + chain.index2pubkey, + blockState, + blobSidecar.signedBlockHeader + ); // Don't batch so verification is not delayed if (!(await chain.bls.verifySignatureSets([signatureSet], {verifyOnMainThread: true}))) { throw new BlobSidecarGossipError(GossipAction.REJECT, { @@ -240,7 +244,11 @@ export async function validateBlockBlobSidecars( const signature = firstSidecarSignedBlockHeader.signature; if (!chain.seenBlockInputCache.isVerifiedProposerSignature(blockSlot, blockRootHex, signature)) { const headState = await chain.getHeadState(); - const signatureSet = getBlockHeaderProposerSignatureSetByHeaderSlot(headState, firstSidecarSignedBlockHeader); + const signatureSet = getBlockHeaderProposerSignatureSetByHeaderSlot( + chain.index2pubkey, + headState, + firstSidecarSignedBlockHeader + ); if ( !(await chain.bls.verifySignatureSets([signatureSet], { diff --git a/packages/beacon-node/src/chain/validation/block.ts b/packages/beacon-node/src/chain/validation/block.ts index cc45a2625e..974b2c1e07 100644 --- a/packages/beacon-node/src/chain/validation/block.ts +++ b/packages/beacon-node/src/chain/validation/block.ts @@ -154,7 +154,7 @@ export async function validateGossipBlock( // [REJECT] The proposer signature, signed_beacon_block.signature, is valid with respect to the proposer_index pubkey. if (!chain.seenBlockInputCache.isVerifiedProposerSignature(blockSlot, blockRoot, signedBlock.signature)) { - const signatureSet = getBlockProposerSignatureSet(blockState, signedBlock); + const signatureSet = getBlockProposerSignatureSet(chain.index2pubkey, blockState, signedBlock); // Don't batch so verification is not delayed if (!(await chain.bls.verifySignatureSets([signatureSet], {verifyOnMainThread: true}))) { throw new BlockGossipError(GossipAction.REJECT, { diff --git a/packages/beacon-node/src/chain/validation/dataColumnSidecar.ts b/packages/beacon-node/src/chain/validation/dataColumnSidecar.ts index 1fbc122d76..6b37afcbff 100644 --- a/packages/beacon-node/src/chain/validation/dataColumnSidecar.ts +++ b/packages/beacon-node/src/chain/validation/dataColumnSidecar.ts @@ -135,6 +135,7 @@ export async function validateGossipDataColumnSidecar( const signature = dataColumnSidecar.signedBlockHeader.signature; if (!chain.seenBlockInputCache.isVerifiedProposerSignature(blockHeader.slot, blockRootHex, signature)) { const signatureSet = getBlockHeaderProposerSignatureSetByParentStateSlot( + chain.index2pubkey, blockState, dataColumnSidecar.signedBlockHeader ); @@ -336,7 +337,11 @@ export async function validateBlockDataColumnSidecars( const signature = firstSidecarSignedBlockHeader.signature; if (!chain.seenBlockInputCache.isVerifiedProposerSignature(slot, rootHex, signature)) { const headState = await chain.getHeadState(); - const signatureSet = getBlockHeaderProposerSignatureSetByHeaderSlot(headState, firstSidecarSignedBlockHeader); + const signatureSet = getBlockHeaderProposerSignatureSetByHeaderSlot( + chain.index2pubkey, + headState, + firstSidecarSignedBlockHeader + ); if ( !(await chain.bls.verifySignatureSets([signatureSet], { diff --git a/packages/beacon-node/src/chain/validation/proposerSlashing.ts b/packages/beacon-node/src/chain/validation/proposerSlashing.ts 
index 8d76470bb0..8825fabdbd 100644 --- a/packages/beacon-node/src/chain/validation/proposerSlashing.ts +++ b/packages/beacon-node/src/chain/validation/proposerSlashing.ts @@ -44,7 +44,7 @@ async function validateProposerSlashing( }); } - const signatureSets = getProposerSlashingSignatureSets(state, proposerSlashing); + const signatureSets = getProposerSlashingSignatureSets(chain.index2pubkey, state, proposerSlashing); if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true, priority: prioritizeBls}))) { throw new ProposerSlashingError(GossipAction.REJECT, { code: ProposerSlashingErrorCode.INVALID, diff --git a/packages/beacon-node/src/chain/validation/signatureSets/contributionAndProof.ts b/packages/beacon-node/src/chain/validation/signatureSets/contributionAndProof.ts index 4fe5122c0d..d56febdda5 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/contributionAndProof.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/contributionAndProof.ts @@ -2,16 +2,17 @@ import {DOMAIN_CONTRIBUTION_AND_PROOF} from "@lodestar/params"; import { CachedBeaconStateAllForks, ISignatureSet, + Index2PubkeyCache, SignatureSetType, computeSigningRoot, } from "@lodestar/state-transition"; import {altair, ssz} from "@lodestar/types"; export function getContributionAndProofSignatureSet( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedContributionAndProof: altair.SignedContributionAndProof ): ISignatureSet { - const {epochCtx} = state; const domain = state.config.getDomain( state.slot, DOMAIN_CONTRIBUTION_AND_PROOF, @@ -20,7 +21,7 @@ export function getContributionAndProofSignatureSet( const signingData = signedContributionAndProof.message; return { type: SignatureSetType.single, - pubkey: epochCtx.index2pubkey[signedContributionAndProof.message.aggregatorIndex], + pubkey: index2pubkey[signedContributionAndProof.message.aggregatorIndex], signingRoot: computeSigningRoot(ssz.altair.ContributionAndProof, signingData, domain), signature: signedContributionAndProof.signature, }; diff --git a/packages/beacon-node/src/chain/validation/signatureSets/syncCommittee.ts b/packages/beacon-node/src/chain/validation/signatureSets/syncCommittee.ts index 7765e4d89e..489b92d2a6 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/syncCommittee.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/syncCommittee.ts @@ -2,12 +2,14 @@ import {DOMAIN_SYNC_COMMITTEE} from "@lodestar/params"; import { CachedBeaconStateAllForks, ISignatureSet, + Index2PubkeyCache, SignatureSetType, computeSigningRoot, } from "@lodestar/state-transition"; import {altair, ssz} from "@lodestar/types"; export function getSyncCommitteeSignatureSet( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, syncCommittee: altair.SyncCommitteeMessage ): ISignatureSet { @@ -15,7 +17,7 @@ export function getSyncCommitteeSignatureSet( return { type: SignatureSetType.single, - pubkey: state.epochCtx.index2pubkey[syncCommittee.validatorIndex], + pubkey: index2pubkey[syncCommittee.validatorIndex], signingRoot: computeSigningRoot(ssz.Root, syncCommittee.beaconBlockRoot, domain), signature: syncCommittee.signature, }; diff --git a/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeSelectionProof.ts b/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeSelectionProof.ts index b94ae87b24..cda51529d4 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeSelectionProof.ts +++ 
b/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeSelectionProof.ts @@ -2,16 +2,18 @@ import {DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF} from "@lodestar/params"; import { CachedBeaconStateAllForks, ISignatureSet, + Index2PubkeyCache, SignatureSetType, computeSigningRoot, } from "@lodestar/state-transition"; import {altair, ssz} from "@lodestar/types"; export function getSyncCommitteeSelectionProofSignatureSet( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, contributionAndProof: altair.ContributionAndProof ): ISignatureSet { - const {epochCtx, config} = state; + const {config} = state; const slot = contributionAndProof.contribution.slot; const domain = config.getDomain(state.slot, DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF, slot); const signingData: altair.SyncAggregatorSelectionData = { @@ -20,7 +22,7 @@ export function getSyncCommitteeSelectionProofSignatureSet( }; return { type: SignatureSetType.single, - pubkey: epochCtx.index2pubkey[contributionAndProof.aggregatorIndex], + pubkey: index2pubkey[contributionAndProof.aggregatorIndex], signingRoot: computeSigningRoot(ssz.altair.SyncAggregatorSelectionData, signingData, domain), signature: contributionAndProof.selectionProof, }; diff --git a/packages/beacon-node/src/chain/validation/syncCommittee.ts b/packages/beacon-node/src/chain/validation/syncCommittee.ts index fb4b7741b4..6995157bf1 100644 --- a/packages/beacon-node/src/chain/validation/syncCommittee.ts +++ b/packages/beacon-node/src/chain/validation/syncCommittee.ts @@ -89,7 +89,7 @@ async function validateSyncCommitteeSigOnly( syncCommittee: altair.SyncCommitteeMessage, prioritizeBls = false ): Promise { - const signatureSet = getSyncCommitteeSignatureSet(headState, syncCommittee); + const signatureSet = getSyncCommitteeSignatureSet(chain.index2pubkey, headState, syncCommittee); if (!(await chain.bls.verifySignatureSets([signatureSet], {batchable: true, priority: prioritizeBls}))) { throw new SyncCommitteeError(GossipAction.REJECT, { code: SyncCommitteeErrorCode.INVALID_SIGNATURE, diff --git a/packages/beacon-node/src/chain/validation/syncCommitteeContributionAndProof.ts b/packages/beacon-node/src/chain/validation/syncCommitteeContributionAndProof.ts index c13fcfd729..e026b728c1 100644 --- a/packages/beacon-node/src/chain/validation/syncCommitteeContributionAndProof.ts +++ b/packages/beacon-node/src/chain/validation/syncCommitteeContributionAndProof.ts @@ -21,6 +21,7 @@ export async function validateSyncCommitteeGossipContributionAndProof( const contributionAndProof = signedContributionAndProof.message; const {contribution, aggregatorIndex} = contributionAndProof; const {subcommitteeIndex, slot} = contribution; + const {index2pubkey} = chain; const headState = chain.getHeadState(); validateGossipSyncCommitteeExceptSig(chain, headState, subcommitteeIndex, { @@ -73,16 +74,14 @@ export async function validateSyncCommitteeGossipContributionAndProof( // i.e. state.validators[contribution_and_proof.aggregator_index].pubkey in get_sync_subcommittee_pubkeys(state, contribution.subcommittee_index). 
// > Checked in validateGossipSyncCommitteeExceptSig() - const participantPubkeys = syncCommitteeParticipantIndices.map( - (validatorIndex) => headState.epochCtx.index2pubkey[validatorIndex] - ); + const participantPubkeys = syncCommitteeParticipantIndices.map((validatorIndex) => index2pubkey[validatorIndex]); const signatureSets = [ // [REJECT] The contribution_and_proof.selection_proof is a valid signature of the SyncAggregatorSelectionData // derived from the contribution by the validator with index contribution_and_proof.aggregator_index. - getSyncCommitteeSelectionProofSignatureSet(headState, contributionAndProof), + getSyncCommitteeSelectionProofSignatureSet(index2pubkey, headState, contributionAndProof), // [REJECT] The aggregator signature, signed_contribution_and_proof.signature, is valid. - getContributionAndProofSignatureSet(headState, signedContributionAndProof), + getContributionAndProofSignatureSet(index2pubkey, headState, signedContributionAndProof), // [REJECT] The aggregate signature is valid for the message beacon_block_root and aggregate pubkey derived from // the participation info in aggregation_bits for the subcommittee specified by the contribution.subcommittee_index. diff --git a/packages/beacon-node/src/chain/validation/voluntaryExit.ts b/packages/beacon-node/src/chain/validation/voluntaryExit.ts index 77b2e34b49..4c3f212e73 100644 --- a/packages/beacon-node/src/chain/validation/voluntaryExit.ts +++ b/packages/beacon-node/src/chain/validation/voluntaryExit.ts @@ -59,7 +59,7 @@ async function validateVoluntaryExit( }); } - const signatureSet = getVoluntaryExitSignatureSet(state, voluntaryExit); + const signatureSet = getVoluntaryExitSignatureSet(chain.index2pubkey, state, voluntaryExit); if (!(await chain.bls.verifySignatureSets([signatureSet], {batchable: true, priority: prioritizeBls}))) { throw new VoluntaryExitError(GossipAction.REJECT, { code: VoluntaryExitErrorCode.INVALID_SIGNATURE, diff --git a/packages/beacon-node/src/sync/backfill/backfill.ts b/packages/beacon-node/src/sync/backfill/backfill.ts index 30c60b3514..e62827dca5 100644 --- a/packages/beacon-node/src/sync/backfill/backfill.ts +++ b/packages/beacon-node/src/sync/backfill/backfill.ts @@ -750,7 +750,9 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} // GENESIS_SLOT doesn't has valid signature if (anchorBlock.message.slot === GENESIS_SLOT) return; - await verifyBlockProposerSignature(this.chain.bls, this.chain.getHeadState(), [anchorBlock]); + await verifyBlockProposerSignature(this.chain.index2pubkey, this.chain.bls, this.chain.getHeadState(), [ + anchorBlock, + ]); // We can write to the disk if this is ahead of prevFinalizedCheckpointBlock otherwise // we will need to go make checks on the top of sync loop before writing as it might @@ -815,7 +817,12 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} // If any of the block's proposer signature fail, we can't trust this peer at all if (verifiedBlocks.length > 0) { - await verifyBlockProposerSignature(this.chain.bls, this.chain.getHeadState(), verifiedBlocks); + await verifyBlockProposerSignature( + this.chain.index2pubkey, + this.chain.bls, + this.chain.getHeadState(), + verifiedBlocks + ); // This is bad, like super bad. 
Abort the backfill if (!nextAnchor) diff --git a/packages/beacon-node/src/sync/backfill/verify.ts b/packages/beacon-node/src/sync/backfill/verify.ts index 67727698a1..a0e09e96ee 100644 --- a/packages/beacon-node/src/sync/backfill/verify.ts +++ b/packages/beacon-node/src/sync/backfill/verify.ts @@ -1,6 +1,11 @@ import {BeaconConfig} from "@lodestar/config"; import {GENESIS_SLOT} from "@lodestar/params"; -import {CachedBeaconStateAllForks, ISignatureSet, getBlockProposerSignatureSet} from "@lodestar/state-transition"; +import { + CachedBeaconStateAllForks, + ISignatureSet, + Index2PubkeyCache, + getBlockProposerSignatureSet, +} from "@lodestar/state-transition"; import {Root, SignedBeaconBlock, Slot, ssz} from "@lodestar/types"; import {IBlsVerifier} from "../../chain/bls/index.js"; import {BackfillSyncError, BackfillSyncErrorCode} from "./errors.js"; @@ -41,6 +46,7 @@ export function verifyBlockSequence( } export async function verifyBlockProposerSignature( + index2pubkey: Index2PubkeyCache, bls: IBlsVerifier, state: CachedBeaconStateAllForks, blocks: SignedBeaconBlock[] @@ -48,7 +54,7 @@ export async function verifyBlockProposerSignature( if (blocks.length === 1 && blocks[0].message.slot === GENESIS_SLOT) return; const signatures = blocks.reduce((sigs: ISignatureSet[], block) => { // genesis block doesn't have valid signature - if (block.message.slot !== GENESIS_SLOT) sigs.push(getBlockProposerSignatureSet(state, block)); + if (block.message.slot !== GENESIS_SLOT) sigs.push(getBlockProposerSignatureSet(index2pubkey, state, block)); return sigs; }, []); diff --git a/packages/beacon-node/test/fixtures/capella.ts b/packages/beacon-node/test/fixtures/capella.ts index fe9b0206ef..0fed040a9f 100644 --- a/packages/beacon-node/test/fixtures/capella.ts +++ b/packages/beacon-node/test/fixtures/capella.ts @@ -1,7 +1,8 @@ -import {CachedBeaconStateAltair} from "@lodestar/state-transition"; +import {CachedBeaconStateAltair, Index2PubkeyCache} from "@lodestar/state-transition"; import {capella} from "@lodestar/types"; export function generateBlsToExecutionChanges( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAltair, count: number ): capella.SignedBLSToExecutionChange[] { @@ -10,7 +11,7 @@ export function generateBlsToExecutionChanges( for (const validatorIndex of state.epochCtx.proposers) { result.push({ message: { - fromBlsPubkey: state.epochCtx.index2pubkey[validatorIndex].toBytes(), + fromBlsPubkey: index2pubkey[validatorIndex].toBytes(), toExecutionAddress: Buffer.alloc(20), validatorIndex, }, diff --git a/packages/beacon-node/test/perf/chain/opPools/opPool.test.ts b/packages/beacon-node/test/perf/chain/opPools/opPool.test.ts index 125cfd5ba6..104ee51b77 100644 --- a/packages/beacon-node/test/perf/chain/opPools/opPool.test.ts +++ b/packages/beacon-node/test/perf/chain/opPools/opPool.test.ts @@ -6,7 +6,7 @@ import { MAX_PROPOSER_SLASHINGS, MAX_VOLUNTARY_EXITS, } from "@lodestar/params"; -import {CachedBeaconStateAltair} from "@lodestar/state-transition"; +import {CachedBeaconStateAltair, Index2PubkeyCache} from "@lodestar/state-transition"; import {ssz} from "@lodestar/types"; import {generatePerfTestCachedStateAltair} from "../../../../../state-transition/test/perf/util.js"; import {BlockType} from "../../../../src/chain/interface.js"; @@ -35,7 +35,8 @@ describe("opPool", () => { fillAttesterSlashing(pool, originalState, MAX_ATTESTER_SLASHINGS); fillProposerSlashing(pool, originalState, MAX_PROPOSER_SLASHINGS); fillVoluntaryExits(pool, originalState, MAX_VOLUNTARY_EXITS); - 
fillBlsToExecutionChanges(pool, originalState, MAX_BLS_TO_EXECUTION_CHANGES); + // TODO: feed index2pubkey separately instead of getting from originalState + fillBlsToExecutionChanges(originalState.epochCtx.index2pubkey, pool, originalState, MAX_BLS_TO_EXECUTION_CHANGES); return pool; }, @@ -53,7 +54,8 @@ describe("opPool", () => { fillAttesterSlashing(pool, originalState, maxItemsInPool); fillProposerSlashing(pool, originalState, maxItemsInPool); fillVoluntaryExits(pool, originalState, maxItemsInPool); - fillBlsToExecutionChanges(pool, originalState, maxItemsInPool); + // TODO: feed index2pubkey separately instead of getting from originalState + fillBlsToExecutionChanges(originalState.epochCtx.index2pubkey, pool, originalState, maxItemsInPool); return pool; }, @@ -99,8 +101,13 @@ function fillVoluntaryExits(pool: OpPool, state: CachedBeaconStateAltair, count: // This does not set the `withdrawalCredentials` for the validator // So it will be in the pool but not returned from `getSlashingsAndExits` -function fillBlsToExecutionChanges(pool: OpPool, state: CachedBeaconStateAltair, count: number): OpPool { - for (const blsToExecution of generateBlsToExecutionChanges(state, count)) { +function fillBlsToExecutionChanges( + index2pubkey: Index2PubkeyCache, + pool: OpPool, + state: CachedBeaconStateAltair, + count: number +): OpPool { + for (const blsToExecution of generateBlsToExecutionChanges(index2pubkey, state, count)) { pool.insertBlsToExecutionChange(blsToExecution); } diff --git a/packages/state-transition/src/block/isValidIndexedAttestation.ts b/packages/state-transition/src/block/isValidIndexedAttestation.ts index f9fb731621..e215f1631a 100644 --- a/packages/state-transition/src/block/isValidIndexedAttestation.ts +++ b/packages/state-transition/src/block/isValidIndexedAttestation.ts @@ -1,5 +1,6 @@ import {ForkSeq, MAX_COMMITTEES_PER_SLOT, MAX_VALIDATORS_PER_COMMITTEE} from "@lodestar/params"; import {IndexedAttestation, IndexedAttestationBigint} from "@lodestar/types"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {getIndexedAttestationBigintSignatureSet, getIndexedAttestationSignatureSet} from "../signatureSets/index.js"; import {CachedBeaconStateAllForks} from "../types.js"; import {verifySignatureSet} from "../util/index.js"; @@ -8,6 +9,7 @@ import {verifySignatureSet} from "../util/index.js"; * Check if `indexedAttestation` has sorted and unique indices and a valid aggregate signature. 
*/ export function isValidIndexedAttestation( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, indexedAttestation: IndexedAttestation, verifySignature: boolean @@ -17,12 +19,13 @@ export function isValidIndexedAttestation( } if (verifySignature) { - return verifySignatureSet(getIndexedAttestationSignatureSet(state, indexedAttestation)); + return verifySignatureSet(getIndexedAttestationSignatureSet(index2pubkey, state, indexedAttestation)); } return true; } export function isValidIndexedAttestationBigint( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, indexedAttestation: IndexedAttestationBigint, verifySignature: boolean @@ -32,7 +35,7 @@ export function isValidIndexedAttestationBigint( } if (verifySignature) { - return verifySignatureSet(getIndexedAttestationBigintSignatureSet(state, indexedAttestation)); + return verifySignatureSet(getIndexedAttestationBigintSignatureSet(index2pubkey, state, indexedAttestation)); } return true; } diff --git a/packages/state-transition/src/block/processAttestationPhase0.ts b/packages/state-transition/src/block/processAttestationPhase0.ts index 2c3732ecc3..c5ab1a6f24 100644 --- a/packages/state-transition/src/block/processAttestationPhase0.ts +++ b/packages/state-transition/src/block/processAttestationPhase0.ts @@ -50,7 +50,14 @@ export function processAttestationPhase0( state.previousEpochAttestations.push(pendingAttestation); } - if (!isValidIndexedAttestation(state, epochCtx.getIndexedAttestation(ForkSeq.phase0, attestation), verifySignature)) { + if ( + !isValidIndexedAttestation( + epochCtx.index2pubkey, + state, + epochCtx.getIndexedAttestation(ForkSeq.phase0, attestation), + verifySignature + ) + ) { throw new Error("Attestation is not valid"); } } diff --git a/packages/state-transition/src/block/processAttestationsAltair.ts b/packages/state-transition/src/block/processAttestationsAltair.ts index 1018f2d774..e467e9c79b 100644 --- a/packages/state-transition/src/block/processAttestationsAltair.ts +++ b/packages/state-transition/src/block/processAttestationsAltair.ts @@ -64,7 +64,7 @@ export function processAttestationsAltair( // TODO: Why should we verify an indexed attestation that we just created? If it's just for the signature // we can verify only that and nothing else. 
if (verifySignature) { - const sigSet = getAttestationWithIndicesSignatureSet(state, attestation, attestingIndices); + const sigSet = getAttestationWithIndicesSignatureSet(epochCtx.index2pubkey, state, attestation, attestingIndices); if (!verifySignatureSet(sigSet)) { throw new Error("Attestation signature is not valid"); } diff --git a/packages/state-transition/src/block/processAttesterSlashing.ts b/packages/state-transition/src/block/processAttesterSlashing.ts index 07f353ce1f..83766faf0f 100644 --- a/packages/state-transition/src/block/processAttesterSlashing.ts +++ b/packages/state-transition/src/block/processAttesterSlashing.ts @@ -1,5 +1,6 @@ import {ForkSeq} from "@lodestar/params"; import {AttesterSlashing} from "@lodestar/types"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {CachedBeaconStateAllForks} from "../types.js"; import {getAttesterSlashableIndices, isSlashableAttestationData, isSlashableValidator} from "../util/index.js"; import {isValidIndexedAttestationBigint} from "./isValidIndexedAttestation.js"; @@ -17,7 +18,8 @@ export function processAttesterSlashing( attesterSlashing: AttesterSlashing, verifySignatures = true ): void { - assertValidAttesterSlashing(state, attesterSlashing, verifySignatures); + const {epochCtx} = state; + assertValidAttesterSlashing(epochCtx.index2pubkey, state, attesterSlashing, verifySignatures); const intersectingIndices = getAttesterSlashableIndices(attesterSlashing); @@ -25,7 +27,7 @@ export function processAttesterSlashing( const validators = state.validators; // Get the validators sub tree once for all indices // Spec requires to sort indexes beforehand for (const index of intersectingIndices.sort((a, b) => a - b)) { - if (isSlashableValidator(validators.getReadonly(index), state.epochCtx.epoch)) { + if (isSlashableValidator(validators.getReadonly(index), epochCtx.epoch)) { slashValidator(fork, state, index); slashedAny = true; } @@ -37,6 +39,7 @@ export function processAttesterSlashing( } export function assertValidAttesterSlashing( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, attesterSlashing: AttesterSlashing, verifySignatures = true @@ -52,7 +55,7 @@ export function assertValidAttesterSlashing( // be higher than the clock and the slashing would still be valid. Same applies to attestation data index, which // can be any arbitrary value. 
Must use bigint variants to hash correctly to all possible values for (const [i, attestation] of [attestation1, attestation2].entries()) { - if (!isValidIndexedAttestationBigint(state, attestation, verifySignatures)) { + if (!isValidIndexedAttestationBigint(index2pubkey, state, attestation, verifySignatures)) { throw new Error(`AttesterSlashing attestation${i} is invalid`); } } diff --git a/packages/state-transition/src/block/processProposerSlashing.ts b/packages/state-transition/src/block/processProposerSlashing.ts index b06ac0a780..8bfbc663c8 100644 --- a/packages/state-transition/src/block/processProposerSlashing.ts +++ b/packages/state-transition/src/block/processProposerSlashing.ts @@ -77,7 +77,7 @@ export function assertValidProposerSlashing( // verify signatures if (verifySignatures) { - const signatureSets = getProposerSlashingSignatureSets(state, proposerSlashing); + const signatureSets = getProposerSlashingSignatureSets(state.epochCtx.index2pubkey, state, proposerSlashing); for (let i = 0; i < signatureSets.length; i++) { if (!verifySignatureSet(signatureSets[i])) { throw new Error(`ProposerSlashing header${i + 1} signature invalid`); diff --git a/packages/state-transition/src/block/processRandao.ts b/packages/state-transition/src/block/processRandao.ts index e2230514c6..f0640f9d6d 100644 --- a/packages/state-transition/src/block/processRandao.ts +++ b/packages/state-transition/src/block/processRandao.ts @@ -17,7 +17,7 @@ export function processRandao(state: CachedBeaconStateAllForks, block: BeaconBlo const randaoReveal = block.body.randaoReveal; // verify RANDAO reveal - if (verifySignature && !verifyRandaoSignature(state, block)) { + if (verifySignature && !verifyRandaoSignature(epochCtx.index2pubkey, state, block)) { throw new Error("RANDAO reveal is an invalid signature"); } diff --git a/packages/state-transition/src/block/processSyncCommittee.ts b/packages/state-transition/src/block/processSyncCommittee.ts index 4ccd9f9549..05495bfce8 100644 --- a/packages/state-transition/src/block/processSyncCommittee.ts +++ b/packages/state-transition/src/block/processSyncCommittee.ts @@ -1,6 +1,7 @@ import {byteArrayEquals} from "@chainsafe/ssz"; import {DOMAIN_SYNC_COMMITTEE, SYNC_COMMITTEE_SIZE} from "@lodestar/params"; import {altair, ssz} from "@lodestar/types"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {G2_POINT_AT_INFINITY} from "../constants/index.js"; import {CachedBeaconStateAllForks} from "../types.js"; import { @@ -23,7 +24,7 @@ export function processSyncAggregate( if (verifySignatures) { // This is to conform to the spec - we want the signature to be verified const participantIndices = block.body.syncAggregate.syncCommitteeBits.intersectValues(committeeIndices); - const signatureSet = getSyncCommitteeSignatureSet(state, block, participantIndices); + const signatureSet = getSyncCommitteeSignatureSet(state.epochCtx.index2pubkey, state, block, participantIndices); // When there's no participation we consider the signature valid and just ignore i if (signatureSet !== null && !verifySignatureSet(signatureSet)) { throw Error("Sync committee signature invalid"); @@ -63,12 +64,12 @@ export function processSyncAggregate( } export function getSyncCommitteeSignatureSet( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, block: altair.BeaconBlock, /** Optional parameter to prevent computing it twice */ participantIndices?: number[] ): ISignatureSet | null { - const {epochCtx} = state; const {syncAggregate} = block.body; const signature = 
syncAggregate.syncCommitteeSignature; @@ -110,7 +111,7 @@ export function getSyncCommitteeSignatureSet( return { type: SignatureSetType.aggregate, - pubkeys: participantIndices.map((i) => epochCtx.index2pubkey[i]), + pubkeys: participantIndices.map((i) => index2pubkey[i]), signingRoot: computeSigningRoot(ssz.Root, rootSigned, domain), signature, }; diff --git a/packages/state-transition/src/block/processVoluntaryExit.ts b/packages/state-transition/src/block/processVoluntaryExit.ts index ca90fec477..ec2892ba03 100644 --- a/packages/state-transition/src/block/processVoluntaryExit.ts +++ b/packages/state-transition/src/block/processVoluntaryExit.ts @@ -74,7 +74,7 @@ export function getVoluntaryExitValidity( return VoluntaryExitValidity.pendingWithdrawals; } - if (verifySignature && !verifyVoluntaryExitSignature(state, signedVoluntaryExit)) { + if (verifySignature && !verifyVoluntaryExitSignature(epochCtx.index2pubkey, state, signedVoluntaryExit)) { return VoluntaryExitValidity.invalidSignature; } diff --git a/packages/state-transition/src/signatureSets/attesterSlashings.ts b/packages/state-transition/src/signatureSets/attesterSlashings.ts index b053bb4978..855a3cb590 100644 --- a/packages/state-transition/src/signatureSets/attesterSlashings.ts +++ b/packages/state-transition/src/signatureSets/attesterSlashings.ts @@ -1,29 +1,33 @@ import {DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; import {AttesterSlashing, IndexedAttestationBigint, SignedBeaconBlock, ssz} from "@lodestar/types"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {CachedBeaconStateAllForks} from "../types.js"; import {ISignatureSet, SignatureSetType, computeSigningRoot, computeStartSlotAtEpoch} from "../util/index.js"; /** Get signature sets from all AttesterSlashing objects in a block */ export function getAttesterSlashingsSignatureSets( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): ISignatureSet[] { return signedBlock.message.body.attesterSlashings.flatMap((attesterSlashing) => - getAttesterSlashingSignatureSets(state, attesterSlashing) + getAttesterSlashingSignatureSets(index2pubkey, state, attesterSlashing) ); } /** Get signature sets from a single AttesterSlashing object */ export function getAttesterSlashingSignatureSets( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, attesterSlashing: AttesterSlashing ): ISignatureSet[] { return [attesterSlashing.attestation1, attesterSlashing.attestation2].map((attestation) => - getIndexedAttestationBigintSignatureSet(state, attestation) + getIndexedAttestationBigintSignatureSet(index2pubkey, state, attestation) ); } export function getIndexedAttestationBigintSignatureSet( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, indexedAttestation: IndexedAttestationBigint ): ISignatureSet { @@ -32,7 +36,7 @@ export function getIndexedAttestationBigintSignatureSet( return { type: SignatureSetType.aggregate, - pubkeys: indexedAttestation.attestingIndices.map((i) => state.epochCtx.index2pubkey[i]), + pubkeys: indexedAttestation.attestingIndices.map((i) => index2pubkey[i]), signingRoot: computeSigningRoot(ssz.phase0.AttestationDataBigint, indexedAttestation.data, domain), signature: indexedAttestation.signature, }; diff --git a/packages/state-transition/src/signatureSets/index.ts b/packages/state-transition/src/signatureSets/index.ts index 9a12d7174a..d2c51a080c 100644 --- a/packages/state-transition/src/signatureSets/index.ts +++ 
b/packages/state-transition/src/signatureSets/index.ts @@ -1,6 +1,7 @@ import {ForkSeq} from "@lodestar/params"; import {IndexedAttestation, SignedBeaconBlock, altair, capella} from "@lodestar/types"; import {getSyncCommitteeSignatureSet} from "../block/processSyncCommittee.js"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {CachedBeaconStateAllForks, CachedBeaconStateAltair} from "../types.js"; import {ISignatureSet} from "../util/index.js"; import {getAttesterSlashingsSignatureSets} from "./attesterSlashings.js"; @@ -14,7 +15,7 @@ import {getVoluntaryExitsSignatureSets} from "./voluntaryExits.js"; export * from "./attesterSlashings.js"; export * from "./blsToExecutionChange.js"; export * from "./indexedAttestation.js"; -export * from "./indexedPayloadAttestation.ts"; +export * from "./indexedPayloadAttestation.js"; export * from "./proposer.js"; export * from "./proposerSlashings.js"; export * from "./randao.js"; @@ -25,6 +26,7 @@ export * from "./voluntaryExits.js"; * Deposits are not included because they can legally have invalid signatures. */ export function getBlockSignatureSets( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock, indexedAttestations: IndexedAttestation[], @@ -37,20 +39,21 @@ export function getBlockSignatureSets( const fork = state.config.getForkSeq(signedBlock.message.slot); const signatureSets = [ - getRandaoRevealSignatureSet(state, signedBlock.message), - ...getProposerSlashingsSignatureSets(state, signedBlock), - ...getAttesterSlashingsSignatureSets(state, signedBlock), - ...getAttestationsSignatureSets(state, signedBlock, indexedAttestations), - ...getVoluntaryExitsSignatureSets(state, signedBlock), + getRandaoRevealSignatureSet(index2pubkey, state, signedBlock.message), + ...getProposerSlashingsSignatureSets(index2pubkey, state, signedBlock), + ...getAttesterSlashingsSignatureSets(index2pubkey, state, signedBlock), + ...getAttestationsSignatureSets(index2pubkey, state, signedBlock, indexedAttestations), + ...getVoluntaryExitsSignatureSets(index2pubkey, state, signedBlock), ]; if (!opts?.skipProposerSignature) { - signatureSets.push(getBlockProposerSignatureSet(state, signedBlock)); + signatureSets.push(getBlockProposerSignatureSet(index2pubkey, state, signedBlock)); } // Only after altair fork, validate tSyncCommitteeSignature if (fork >= ForkSeq.altair) { const syncCommitteeSignatureSet = getSyncCommitteeSignatureSet( + index2pubkey, state as CachedBeaconStateAltair, (signedBlock as altair.SignedBeaconBlock).message ); diff --git a/packages/state-transition/src/signatureSets/indexedAttestation.ts b/packages/state-transition/src/signatureSets/indexedAttestation.ts index b795b42b65..3ba35a501c 100644 --- a/packages/state-transition/src/signatureSets/indexedAttestation.ts +++ b/packages/state-transition/src/signatureSets/indexedAttestation.ts @@ -1,5 +1,6 @@ import {DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; import {IndexedAttestation, SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {CachedBeaconStateAllForks} from "../types.js"; import { ISignatureSet, @@ -19,25 +20,33 @@ export function getAttestationDataSigningRoot( } export function getAttestationWithIndicesSignatureSet( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, attestation: Pick, attestingIndices: number[] ): ISignatureSet { return createAggregateSignatureSetFromComponents( - attestingIndices.map((i) => 
state.epochCtx.index2pubkey[i]), + attestingIndices.map((i) => index2pubkey[i]), getAttestationDataSigningRoot(state, attestation.data), attestation.signature ); } export function getIndexedAttestationSignatureSet( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, indexedAttestation: IndexedAttestation ): ISignatureSet { - return getAttestationWithIndicesSignatureSet(state, indexedAttestation, indexedAttestation.attestingIndices); + return getAttestationWithIndicesSignatureSet( + index2pubkey, + state, + indexedAttestation, + indexedAttestation.attestingIndices + ); } export function getAttestationsSignatureSets( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock, indexedAttestations: IndexedAttestation[] @@ -47,5 +56,7 @@ export function getAttestationsSignatureSets( `Indexed attestations length mismatch: got ${indexedAttestations.length}, expected ${signedBlock.message.body.attestations.length}` ); } - return indexedAttestations.map((indexedAttestation) => getIndexedAttestationSignatureSet(state, indexedAttestation)); + return indexedAttestations.map((indexedAttestation) => + getIndexedAttestationSignatureSet(index2pubkey, state, indexedAttestation) + ); } diff --git a/packages/state-transition/src/signatureSets/proposer.ts b/packages/state-transition/src/signatureSets/proposer.ts index 5995657be8..ec518d6ac2 100644 --- a/packages/state-transition/src/signatureSets/proposer.ts +++ b/packages/state-transition/src/signatureSets/proposer.ts @@ -1,22 +1,25 @@ import {DOMAIN_BEACON_PROPOSER} from "@lodestar/params"; import {SignedBeaconBlock, SignedBlindedBeaconBlock, Slot, isBlindedBeaconBlock, phase0, ssz} from "@lodestar/types"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {CachedBeaconStateAllForks} from "../types.js"; import {computeSigningRoot} from "../util/index.js"; import {ISignatureSet, SignatureSetType, verifySignatureSet} from "../util/signatureSets.js"; export function verifyProposerSignature( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock | SignedBlindedBeaconBlock ): boolean { - const signatureSet = getBlockProposerSignatureSet(state, signedBlock); + const signatureSet = getBlockProposerSignatureSet(index2pubkey, state, signedBlock); return verifySignatureSet(signatureSet); } export function getBlockProposerSignatureSet( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock | SignedBlindedBeaconBlock ): ISignatureSet { - const {config, epochCtx} = state; + const {config} = state; const domain = config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, signedBlock.message.slot); const blockType = isBlindedBeaconBlock(signedBlock.message) @@ -25,37 +28,40 @@ export function getBlockProposerSignatureSet( return { type: SignatureSetType.single, - pubkey: epochCtx.index2pubkey[signedBlock.message.proposerIndex], + pubkey: index2pubkey[signedBlock.message.proposerIndex], signingRoot: computeSigningRoot(blockType, signedBlock.message, domain), signature: signedBlock.signature, }; } export function getBlockHeaderProposerSignatureSetByParentStateSlot( + index2pubkey: Index2PubkeyCache, parentState: CachedBeaconStateAllForks, signedBlockHeader: phase0.SignedBeaconBlockHeader ) { - return getBlockHeaderProposerSignatureSet(parentState, signedBlockHeader, parentState.slot); + return getBlockHeaderProposerSignatureSet(index2pubkey, parentState, signedBlockHeader, parentState.slot); } export function 
getBlockHeaderProposerSignatureSetByHeaderSlot( + index2pubkey: Index2PubkeyCache, headState: CachedBeaconStateAllForks, signedBlockHeader: phase0.SignedBeaconBlockHeader ) { - return getBlockHeaderProposerSignatureSet(headState, signedBlockHeader, signedBlockHeader.message.slot); + return getBlockHeaderProposerSignatureSet(index2pubkey, headState, signedBlockHeader, signedBlockHeader.message.slot); } function getBlockHeaderProposerSignatureSet( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlockHeader: phase0.SignedBeaconBlockHeader, domainSlot: Slot ): ISignatureSet { - const {config, epochCtx} = state; + const {config} = state; const domain = config.getDomain(domainSlot, DOMAIN_BEACON_PROPOSER, signedBlockHeader.message.slot); return { type: SignatureSetType.single, - pubkey: epochCtx.index2pubkey[signedBlockHeader.message.proposerIndex], + pubkey: index2pubkey[signedBlockHeader.message.proposerIndex], signingRoot: computeSigningRoot(ssz.phase0.BeaconBlockHeader, signedBlockHeader.message, domain), signature: signedBlockHeader.signature, }; diff --git a/packages/state-transition/src/signatureSets/proposerSlashings.ts b/packages/state-transition/src/signatureSets/proposerSlashings.ts index d21c0906c5..ab0294ca9f 100644 --- a/packages/state-transition/src/signatureSets/proposerSlashings.ts +++ b/packages/state-transition/src/signatureSets/proposerSlashings.ts @@ -1,5 +1,6 @@ import {DOMAIN_BEACON_PROPOSER} from "@lodestar/params"; import {SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {CachedBeaconStateAllForks} from "../types.js"; import {ISignatureSet, SignatureSetType, computeSigningRoot} from "../util/index.js"; @@ -7,11 +8,11 @@ import {ISignatureSet, SignatureSetType, computeSigningRoot} from "../util/index * Extract signatures to allow validating all block signatures at once */ export function getProposerSlashingSignatureSets( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, proposerSlashing: phase0.ProposerSlashing ): ISignatureSet[] { - const {epochCtx} = state; - const pubkey = epochCtx.index2pubkey[proposerSlashing.signedHeader1.message.proposerIndex]; + const pubkey = index2pubkey[proposerSlashing.signedHeader1.message.proposerIndex]; // In state transition, ProposerSlashing headers are only partially validated. Their slot could be higher than the // clock and the slashing would still be valid. 
Must use bigint variants to hash correctly to all possible values @@ -32,10 +33,11 @@ export function getProposerSlashingSignatureSets( } export function getProposerSlashingsSignatureSets( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): ISignatureSet[] { return signedBlock.message.body.proposerSlashings.flatMap((proposerSlashing) => - getProposerSlashingSignatureSets(state, proposerSlashing) + getProposerSlashingSignatureSets(index2pubkey, state, proposerSlashing) ); } diff --git a/packages/state-transition/src/signatureSets/randao.ts b/packages/state-transition/src/signatureSets/randao.ts index 495212f6aa..5a538093cc 100644 --- a/packages/state-transition/src/signatureSets/randao.ts +++ b/packages/state-transition/src/signatureSets/randao.ts @@ -1,5 +1,6 @@ import {DOMAIN_RANDAO} from "@lodestar/params"; import {BeaconBlock, ssz} from "@lodestar/types"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {CachedBeaconStateAllForks} from "../types.js"; import { ISignatureSet, @@ -9,22 +10,29 @@ import { verifySignatureSet, } from "../util/index.js"; -export function verifyRandaoSignature(state: CachedBeaconStateAllForks, block: BeaconBlock): boolean { - return verifySignatureSet(getRandaoRevealSignatureSet(state, block)); +export function verifyRandaoSignature( + index2pubkey: Index2PubkeyCache, + state: CachedBeaconStateAllForks, + block: BeaconBlock +): boolean { + return verifySignatureSet(getRandaoRevealSignatureSet(index2pubkey, state, block)); } /** * Extract signatures to allow validating all block signatures at once */ -export function getRandaoRevealSignatureSet(state: CachedBeaconStateAllForks, block: BeaconBlock): ISignatureSet { - const {epochCtx} = state; +export function getRandaoRevealSignatureSet( + index2pubkey: Index2PubkeyCache, + state: CachedBeaconStateAllForks, + block: BeaconBlock +): ISignatureSet { // should not get epoch from epochCtx const epoch = computeEpochAtSlot(block.slot); const domain = state.config.getDomain(state.slot, DOMAIN_RANDAO, block.slot); return { type: SignatureSetType.single, - pubkey: epochCtx.index2pubkey[block.proposerIndex], + pubkey: index2pubkey[block.proposerIndex], signingRoot: computeSigningRoot(ssz.Epoch, epoch, domain), signature: block.body.randaoReveal, }; diff --git a/packages/state-transition/src/signatureSets/voluntaryExits.ts b/packages/state-transition/src/signatureSets/voluntaryExits.ts index 9fae3f920d..e80c6e3a4c 100644 --- a/packages/state-transition/src/signatureSets/voluntaryExits.ts +++ b/packages/state-transition/src/signatureSets/voluntaryExits.ts @@ -1,4 +1,5 @@ import {SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {CachedBeaconStateAllForks} from "../types.js"; import { ISignatureSet, @@ -9,36 +10,38 @@ import { } from "../util/index.js"; export function verifyVoluntaryExitSignature( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedVoluntaryExit: phase0.SignedVoluntaryExit ): boolean { - return verifySignatureSet(getVoluntaryExitSignatureSet(state, signedVoluntaryExit)); + return verifySignatureSet(getVoluntaryExitSignatureSet(index2pubkey, state, signedVoluntaryExit)); } /** * Extract signatures to allow validating all block signatures at once */ export function getVoluntaryExitSignatureSet( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedVoluntaryExit: phase0.SignedVoluntaryExit ): ISignatureSet { - const {epochCtx} = 
state; const slot = computeStartSlotAtEpoch(signedVoluntaryExit.message.epoch); const domain = state.config.getDomainForVoluntaryExit(state.slot, slot); return { type: SignatureSetType.single, - pubkey: epochCtx.index2pubkey[signedVoluntaryExit.message.validatorIndex], + pubkey: index2pubkey[signedVoluntaryExit.message.validatorIndex], signingRoot: computeSigningRoot(ssz.phase0.VoluntaryExit, signedVoluntaryExit.message, domain), signature: signedVoluntaryExit.signature, }; } export function getVoluntaryExitsSignatureSets( + index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): ISignatureSet[] { return signedBlock.message.body.voluntaryExits.map((voluntaryExit) => - getVoluntaryExitSignatureSet(state, voluntaryExit) + getVoluntaryExitSignatureSet(index2pubkey, state, voluntaryExit) ); } diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts index a7d49c308a..53ff5668c3 100644 --- a/packages/state-transition/src/stateTransition.ts +++ b/packages/state-transition/src/stateTransition.ts @@ -111,7 +111,7 @@ export function stateTransition( postState = processSlotsWithTransientCache(postState, blockSlot, options, {metrics, validatorMonitor}); // Verify proposer signature only - if (verifyProposer && !verifyProposerSignature(postState, signedBlock)) { + if (verifyProposer && !verifyProposerSignature(postState.epochCtx.index2pubkey, postState, signedBlock)) { throw new Error("Invalid block signature"); } diff --git a/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts b/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts index 15199779d9..ac69dd21f6 100644 --- a/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts +++ b/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts @@ -45,6 +45,8 @@ describe("validate indexed attestation", () => { data: attestationData, signature: EMPTY_SIGNATURE, }; - expect(isValidIndexedAttestation(state, indexedAttestation, false)).toBe(expectedValue); + expect(isValidIndexedAttestation(state.epochCtx.index2pubkey, state, indexedAttestation, false)).toBe( + expectedValue + ); }); }); diff --git a/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts b/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts index 6d874b7297..00f2cce4d1 100644 --- a/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts +++ b/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts @@ -70,7 +70,7 @@ describe("signatureSets", () => { state.epochCtx.getIndexedAttestation(fork, attestation) ); - const signatureSets = getBlockSignatureSets(state, signedBlock, indexedAttestations); + const signatureSets = getBlockSignatureSets(state.epochCtx.index2pubkey, state, signedBlock, indexedAttestations); expect(signatureSets.length).toBe( // block signature 1 + From 889b1c44754fdfe6acd826fc10b5b0013d9a6faa Mon Sep 17 00:00:00 2001 From: Nico Flaig Date: Fri, 12 Dec 2025 04:18:23 +0100 Subject: [PATCH 03/20] chore: remove merge transition code (#8680) **Motivation** All networks have completed the merge transition and most execution clients no longer support pre-merge, so it is no longer possible to run a network from a genesis before bellatrix unless you keep it to phase0/altair only, which still works after this PR is merged. This code is effectively tech debt: it is no longer exercised and just gets in the way when doing refactors.
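To illustrate what the removal described below buys, here is a hedged sketch (placeholder local types and a made-up helper name, not the code actually touched in this diff): once transition handling is gone, execution payload verification no longer needs terminal PoW block lookups, TTD checks, or the `SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY` window; a block either carries an execution payload and is handed to the execution engine, or it is simply classified as pre-merge.

```ts
// Hedged sketch only: these minimal local types stand in for the real Lodestar
// imports so the example is self-contained; `classifyExecutionStatus` is a
// hypothetical helper, not a function in this PR.
enum ExecutionStatus {
  PreMerge = "PreMerge",
  Syncing = "Syncing",
  Valid = "Valid",
}

interface ExecutionPayload {
  blockHash: Uint8Array;
  parentHash: Uint8Array;
}

interface BeaconBlockBody {
  executionPayload?: ExecutionPayload;
}

// With the merge transition removed, the only remaining "pre-merge" case is a
// block body without an execution payload; everything else goes straight to the
// execution engine, and ACCEPTED/SYNCING responses are always safe to import
// optimistically (no terminal PoW block or TTD validation anymore).
function classifyExecutionStatus(body: BeaconBlockBody): ExecutionStatus {
  if (body.executionPayload === undefined) {
    return ExecutionStatus.PreMerge;
  }
  // Placeholder: in the real code path the payload is submitted to the engine
  // and the engine's response decides Valid vs Syncing.
  return ExecutionStatus.Syncing;
}

// Example usage
console.log(classifyExecutionStatus({})); // "PreMerge"
```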
**Description** Removes all code related to performing the merge transition. Running the node pre-merge (CL only mode) is still possible and syncing still works. Also removed a few CLI flags we added for the merge specifically, those shouldn't be used anymore. Spec constants like `TERMINAL_TOTAL_DIFFICULTY` are kept for spec compliance and ssz types (like `PowBlock`) as well. I had to disable a few spec tests related to handling the merge block since those code paths are removed. Closes https://github.com/ChainSafe/lodestar/issues/8661 --- .env.test | 4 +- .github/workflows/test-sim-merge.yml | 58 - dashboards/lodestar_execution_engine.json | 987 ++---------------- .../advanced-topics/setting-up-a-testnet.md | 1 - .../beacon/genericServerTest/config.test.ts | 1 - packages/beacon-node/package.json | 1 - .../src/chain/blocks/verifyBlock.ts | 25 +- .../blocks/verifyBlocksExecutionPayloads.ts | 181 +--- packages/beacon-node/src/chain/chain.ts | 13 +- .../beacon-node/src/chain/forkChoice/index.ts | 5 +- packages/beacon-node/src/chain/options.ts | 6 - .../beacon-node/src/chain/prepareNextSlot.ts | 2 +- .../chain/produceBlock/produceBlockBody.ts | 145 +-- .../beacon-node/src/chain/validation/block.ts | 3 +- .../src/eth1/eth1MergeBlockTracker.ts | 328 ------ packages/beacon-node/src/eth1/index.ts | 67 +- packages/beacon-node/src/eth1/interface.ts | 46 +- .../beacon-node/src/execution/engine/http.ts | 16 +- .../beacon-node/src/execution/engine/mock.ts | 18 +- .../src/metrics/metrics/lodestar.ts | 52 - packages/beacon-node/src/node/notifier.ts | 36 +- .../e2e/eth1/eth1MergeBlockTracker.test.ts | 148 --- .../produceBlock/produceBlockBody.test.ts | 2 - .../perf/chain/verifyImportBlocks.test.ts | 3 +- .../{besu/post-merge.sh => besu.sh} | 3 +- .../scripts/el-interop/besu/common-setup.sh | 4 +- .../post-merge.sh => besudocker.sh} | 3 +- .../el-interop/besudocker/common-setup.sh | 4 +- .../post-merge.sh => ethereumjsdocker.sh} | 3 +- .../ethereumjsdocker/common-setup.sh | 4 +- .../{geth/post-merge.sh => geth.sh} | 3 +- .../scripts/el-interop/geth/common-setup.sh | 4 +- .../test/scripts/el-interop/geth/pre-merge.sh | 8 - .../post-merge.sh => gethdocker.sh} | 3 +- .../el-interop/gethdocker/common-setup.sh | 4 +- .../el-interop/gethdocker/pre-merge.sh | 9 - .../el-interop/mergemock/common-setup.sh | 19 - .../el-interop/mergemock/genesisPre.tmpl | 36 - .../test/scripts/el-interop/mergemock/jwt.hex | 1 - .../el-interop/mergemock/post-merge.sh | 9 - .../post-merge.sh => nethermind.sh} | 3 +- .../el-interop/nethermind/pre-merge.sh | 9 - .../post-merge.sh => netherminddocker.sh} | 3 +- .../netherminddocker/common-setup.sh | 2 +- .../el-interop/netherminddocker/pre-merge.sh | 10 - .../test/sim/electra-interop.test.ts | 9 +- .../beacon-node/test/sim/mergemock.test.ts | 273 ----- .../test/spec/presets/fork_choice.test.ts | 64 +- .../test/spec/utils/specTestIterator.ts | 16 +- .../unit/eth1/eth1MergeBlockTracker.test.ts | 256 ----- .../test/utils/networkWithMockDb.ts | 1 - packages/beacon-node/test/utils/runEl.ts | 14 +- packages/cli/src/config/beaconParams.ts | 9 +- .../src/options/beaconNodeOptions/chain.ts | 11 - packages/cli/src/options/paramsOptions.ts | 46 +- packages/cli/test/sim/multiFork.test.ts | 9 - .../unit/options/beaconNodeOptions.test.ts | 2 - .../crucible/assertions/mergeAssertion.ts | 21 - .../utils/crucible/clients/execution/geth.ts | 5 +- .../utils/crucible/clients/execution/index.ts | 6 +- .../crucible/clients/execution/nethermind.ts | 3 +- .../cli/test/utils/crucible/interfaces.ts | 6 - 
.../utils/crucible/utils/executionGenesis.ts | 19 +- packages/config/src/chainConfig/types.ts | 3 + .../fork-choice/src/forkChoice/forkChoice.ts | 76 +- .../fork-choice/src/forkChoice/interface.ts | 7 - packages/fork-choice/src/index.ts | 3 +- .../test/unit/forkChoice/utils.test.ts | 71 -- packages/params/src/index.ts | 1 + packages/state-transition/src/block/index.ts | 8 +- .../src/block/processExecutionPayload.ts | 22 +- .../state-transition/src/util/execution.ts | 60 -- packages/types/src/bellatrix/types.ts | 1 + packages/validator/src/util/params.ts | 4 +- 74 files changed, 199 insertions(+), 3119 deletions(-) delete mode 100644 .github/workflows/test-sim-merge.yml delete mode 100644 packages/beacon-node/src/eth1/eth1MergeBlockTracker.ts delete mode 100644 packages/beacon-node/test/e2e/eth1/eth1MergeBlockTracker.test.ts rename packages/beacon-node/test/scripts/el-interop/{besu/post-merge.sh => besu.sh} (88%) rename packages/beacon-node/test/scripts/el-interop/{besudocker/post-merge.sh => besudocker.sh} (90%) rename packages/beacon-node/test/scripts/el-interop/{ethereumjsdocker/post-merge.sh => ethereumjsdocker.sh} (86%) rename packages/beacon-node/test/scripts/el-interop/{geth/post-merge.sh => geth.sh} (87%) delete mode 100755 packages/beacon-node/test/scripts/el-interop/geth/pre-merge.sh rename packages/beacon-node/test/scripts/el-interop/{gethdocker/post-merge.sh => gethdocker.sh} (89%) delete mode 100755 packages/beacon-node/test/scripts/el-interop/gethdocker/pre-merge.sh delete mode 100755 packages/beacon-node/test/scripts/el-interop/mergemock/common-setup.sh delete mode 100644 packages/beacon-node/test/scripts/el-interop/mergemock/genesisPre.tmpl delete mode 100644 packages/beacon-node/test/scripts/el-interop/mergemock/jwt.hex delete mode 100755 packages/beacon-node/test/scripts/el-interop/mergemock/post-merge.sh rename packages/beacon-node/test/scripts/el-interop/{nethermind/post-merge.sh => nethermind.sh} (89%) delete mode 100755 packages/beacon-node/test/scripts/el-interop/nethermind/pre-merge.sh rename packages/beacon-node/test/scripts/el-interop/{netherminddocker/post-merge.sh => netherminddocker.sh} (91%) delete mode 100755 packages/beacon-node/test/scripts/el-interop/netherminddocker/pre-merge.sh delete mode 100644 packages/beacon-node/test/sim/mergemock.test.ts delete mode 100644 packages/beacon-node/test/unit/eth1/eth1MergeBlockTracker.test.ts delete mode 100644 packages/cli/test/utils/crucible/assertions/mergeAssertion.ts delete mode 100644 packages/fork-choice/test/unit/forkChoice/utils.test.ts diff --git a/.env.test b/.env.test index 38a2e490d5..c0628a1dc9 100644 --- a/.env.test +++ b/.env.test @@ -1,7 +1,5 @@ # We use these images during sim and e2e tests -# This is the last version which supports pre/post merge chains in the same network -# All newer versions only work with post merge chains -GETH_DOCKER_IMAGE=ethereum/client-go:v1.16.2 +GETH_DOCKER_IMAGE=ethereum/client-go:v1.16.7 # Use either image or local binary for the testing GETH_BINARY_DIR= LIGHTHOUSE_DOCKER_IMAGE=ethpandaops/lighthouse:unstable-d235f2c diff --git a/.github/workflows/test-sim-merge.yml b/.github/workflows/test-sim-merge.yml deleted file mode 100644 index 83cb48a5b4..0000000000 --- a/.github/workflows/test-sim-merge.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: Sim merge execution/builder tests - -concurrency: - # If PR, cancel prev commits. 
head_ref = source branch name on pull_request, null if push - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -on: - push: - # We intentionally don't run push on feature branches. See PR for rational. - branches: [unstable, stable] - pull_request: - workflow_dispatch: - -env: - GETH_IMAGE: ethereum/client-go:v1.10.25 - NETHERMIND_IMAGE: nethermind/nethermind:1.14.3 - MERGEMOCK_IMAGE: g11tech/mergemock:latest - GETH_WITHDRAWALS_IMAGE: g11tech/geth:withdrawalsfeb8 - ETHEREUMJS_WITHDRAWALS_IMAGE: g11tech/ethereumjs:blobs-b6b63 - NETHERMIND_WITHDRAWALS_IMAGE: nethermindeth/nethermind:withdrawals_yolo - ETHEREUMJS_BLOBS_IMAGE: g11tech/ethereumjs:blobs-b6b63 - -jobs: - sim-merge-tests: - name: Sim merge tests - runs-on: buildjet-4vcpu-ubuntu-2204 - steps: - - uses: actions/checkout@v4 - - uses: "./.github/actions/setup-and-build" - with: - node: 24 - - - name: Pull Geth - run: docker pull $GETH_IMAGE - - - name: Pull Nethermind - run: docker pull $NETHERMIND_IMAGE - - - name: Pull mergemock - run: docker pull $MERGEMOCK_IMAGE - - - name: Test Lodestar <> mergemock relay - run: yarn test:sim:mergemock - working-directory: packages/beacon-node - env: - EL_BINARY_DIR: ${{ env.MERGEMOCK_IMAGE }} - EL_SCRIPT_DIR: mergemock - LODESTAR_PRESET: mainnet - ENGINE_PORT: 8551 - ETH_PORT: 8661 - - - name: Upload debug log test files - if: ${{ always() }} - uses: actions/upload-artifact@v4 - with: - name: debug-test-logs - path: packages/beacon-node/test-logs diff --git a/dashboards/lodestar_execution_engine.json b/dashboards/lodestar_execution_engine.json index 63c1ab636e..434563d54e 100644 --- a/dashboards/lodestar_execution_engine.json +++ b/dashboards/lodestar_execution_engine.json @@ -90,6 +90,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -173,6 +174,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -258,6 +260,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -343,6 +346,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -428,6 +432,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -513,6 +518,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -624,6 +630,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -637,6 +644,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 1, @@ -728,10 +736,12 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -760,6 +770,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -773,6 +784,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -843,6 +855,7 @@ 
"mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -856,6 +869,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -926,6 +940,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -939,6 +954,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 1, @@ -1010,6 +1026,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1023,6 +1040,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1105,6 +1123,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1118,6 +1137,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1185,6 +1205,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1198,6 +1219,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1277,6 +1299,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1290,6 +1313,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1382,6 +1406,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1395,6 +1420,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1487,6 +1513,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1500,6 +1527,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1618,6 +1646,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1631,6 +1660,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1759,10 +1789,12 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "value" + "textMode": "value", + "wideLayout": true }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -1809,10 +1841,12 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -1861,10 +1895,12 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -1913,10 +1949,12 @@ "fields": "/^Time$/", "values": false }, + "showPercentChange": false, 
"text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -1947,6 +1985,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1960,6 +1999,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2054,6 +2094,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -2067,6 +2108,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 1, @@ -2138,6 +2180,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -2151,6 +2194,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2244,6 +2288,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -2257,6 +2302,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 1, @@ -2326,6 +2372,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -2339,6 +2386,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2409,927 +2457,10 @@ ], "title": "Eth1 fetch rate", "type": "timeseries" - }, - { - "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 92 - }, - "id": 437, - "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "refId": "A" - } - ], - "title": "Merge Tracking", - "type": "row" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [ - { - "options": { - "0": { - "index": 0, - "text": "STOPPED" - }, - "1": { - "index": 1, - "text": "SEARCHING" - }, - "2": { - "index": 2, - "text": "FOUND" - }, - "3": { - "index": 3, - "text": "MERGE_COMPLETE" - } - }, - "type": "value" - } - ], - "unit": "none" - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 2, - "x": 0, - "y": 93 - }, - "id": 439, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "last" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "value" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_merge_status", - "hide": false, - "interval": "", - "legendFormat": "", - "refId": "A" - } - ], - "title": "Merge Status", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "thresholds" - }, - "mappings": [] - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 2, - "x": 2, - "y": 93 - }, - "id": 459, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - 
"reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "auto" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_merge_ttd", - "format": "time_series", - "instant": false, - "interval": "", - "legendFormat": "", - "refId": "A" - } - ], - "title": "Target TTD", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "thresholds" - }, - "mappings": [], - "unit": "s" - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 4, - "x": 4, - "y": 93 - }, - "id": 475, - "options": { - "colorMode": "value", - "graphMode": "none", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "auto" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "(lodestar_eth1_merge_ttd - lodestar_eth1_latest_block_ttd)\n/\nrate(lodestar_eth1_latest_block_ttd[32m])", - "hide": false, - "interval": "", - "legendFormat": "", - "refId": "B" - } - ], - "title": "Time to TTD", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "thresholds" - }, - "mappings": [], - "unit": "dateTimeAsSystem" - }, - "overrides": [] - }, - "gridPos": { - "h": 6, - "w": 4, - "x": 8, - "y": 93 - }, - "id": 476, - "options": { - "colorMode": "value", - "graphMode": "none", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "auto" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "(time()\n+\n(lodestar_eth1_merge_ttd - lodestar_eth1_latest_block_ttd)\n/\nrate(lodestar_eth1_latest_block_ttd[32m])\n)*1000", - "hide": false, - "interval": "", - "legendFormat": "", - "refId": "B" - } - ], - "title": "TTD date aprox", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [], - "unit": "none" - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 4, - "x": 12, - "y": 93 - }, - "id": 462, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "last" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "value" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_parent_blocks_fetched_total", - "hide": false, - "interval": "", - "legendFormat": "", - "refId": "A" - } - ], - "title": "Parents fetched", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "thresholds" - }, - "mappings": [] - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 4, - "x": 16, - "y": 93 - }, - "id": 456, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - 
"orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "name" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_merge_block_details", - "format": "time_series", - "instant": true, - "interval": "", - "legendFormat": "{{terminalBlockNumber}}", - "refId": "A" - } - ], - "title": "Terminal Block Num", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "thresholds" - }, - "mappings": [] - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 4, - "x": 20, - "y": 93 - }, - "id": 464, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "name" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_merge_block_details", - "format": "time_series", - "instant": true, - "interval": "", - "legendFormat": "{{terminalBlockNumber}}", - "refId": "A" - } - ], - "title": "Terminal Block Time", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [] - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 2, - "x": 0, - "y": 96 - }, - "id": 460, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "last" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "auto" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_merge_td_factor", - "hide": false, - "interval": "", - "legendFormat": "td factor", - "refId": "A" - } - ], - "title": "TD factor", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [] - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 2, - "x": 2, - "y": 96 - }, - "id": 450, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "last" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "auto" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_latest_block_ttd", - "hide": false, - "interval": "", - "legendFormat": "latest block ttd", - "refId": "A" - } - ], - "title": "Latest td", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [], - "unit": "dateTimeFromNow" - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 2, - "x": 4, - "y": 96 - }, - "id": 463, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "last" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "auto" - }, - "pluginVersion": 
"9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_latest_block_timestamp * 1000", - "hide": false, - "interval": "", - "legendFormat": "latest block ttd", - "refId": "A" - } - ], - "title": "Latest block time", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [] - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 2, - "x": 6, - "y": 96 - }, - "id": 461, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "last" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "auto" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_latest_block_number", - "hide": false, - "interval": "", - "legendFormat": "latest block ttd", - "refId": "A" - } - ], - "title": "Latest num", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [] - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 4, - "x": 12, - "y": 96 - }, - "id": 467, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "last" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "auto" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_get_terminal_pow_block_promise_cache_hit_total", - "hide": false, - "interval": "", - "legendFormat": "cache hits", - "refId": "A" - } - ], - "title": "Promise cache hits", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "thresholds" - }, - "mappings": [] - }, - "overrides": [] - }, - "gridPos": { - "h": 3, - "w": 8, - "x": 16, - "y": 96 - }, - "id": 455, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "text": {}, - "textMode": "name" - }, - "pluginVersion": "9.3.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_merge_block_details", - "format": "time_series", - "instant": true, - "interval": "", - "legendFormat": "{{terminalBlockHash}}", - "refId": "A" - } - ], - "title": "Terminal Block Hash", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "opacity", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { 
- "mode": "off" - } - }, - "links": [], - "mappings": [], - "unit": "none" - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 99 - }, - "id": 458, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "8.2.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_latest_block_ttd", - "hide": false, - "interval": "", - "legendFormat": "lastest block td", - "refId": "A" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_merge_ttd", - "hide": false, - "interval": "", - "legendFormat": "merge ttd", - "refId": "B" - } - ], - "title": "Latest TD vs TTD", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "opacity", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "links": [], - "mappings": [], - "unit": "short" - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 12, - "y": 99 - }, - "id": 444, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "8.2.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "rate(lodestar_eth1_poll_merge_block_errors_total[$rate_interval])", - "hide": false, - "interval": "", - "legendFormat": "polling errors", - "refId": "A" - } - ], - "title": "Error rate", - "type": "timeseries" } ], "refresh": "10s", - "schemaVersion": 38, - "style": "dark", + "schemaVersion": 39, "tags": [ "lodestar" ], diff --git a/docs/pages/contribution/advanced-topics/setting-up-a-testnet.md b/docs/pages/contribution/advanced-topics/setting-up-a-testnet.md index 04cac75c4b..e9cb16d1d7 100644 --- a/docs/pages/contribution/advanced-topics/setting-up-a-testnet.md +++ b/docs/pages/contribution/advanced-topics/setting-up-a-testnet.md @@ -101,4 +101,3 @@ To set up a local testnet with a Post-Merge configuration, you may need to add t - `--params.ALTAIR_FORK_EPOCH 0` - `--params.BELLATRIX_FORK_EPOCH 0` -- `--terminal-total-difficulty-override 0` diff --git a/packages/api/test/unit/beacon/genericServerTest/config.test.ts b/packages/api/test/unit/beacon/genericServerTest/config.test.ts index 534bc69f51..eff05ac926 100644 --- a/packages/api/test/unit/beacon/genericServerTest/config.test.ts +++ b/packages/api/test/unit/beacon/genericServerTest/config.test.ts @@ -16,7 +16,6 @@ describe("beacon / config", () => { PRESET_BASE: "mainnet", DEPOSIT_CONTRACT_ADDRESS: "0xff50ed3d0ec03ac01d4c79aad74928bff48a7b2b", GENESIS_FORK_VERSION: "0x00001020", - TERMINAL_TOTAL_DIFFICULTY: 
"115792089237316195423570985008687907853269984665640564039457584007913129639936", MIN_GENESIS_TIME: "1606824000", }; diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index 3c92b38b19..d956c93bcc 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -103,7 +103,6 @@ "test:unit": "vitest run --project unit --project unit-minimal", "test:e2e": "vitest run --project e2e --project e2e-mainnet", "test:sim": "vitest run test/sim/**/*.test.ts", - "test:sim:mergemock": "vitest run test/sim/mergemock.test.ts", "test:sim:blobs": "vitest run test/sim/4844-interop.test.ts", "download-spec-tests": "node --loader=ts-node/esm test/spec/downloadTests.ts", "test:spec:bls": "vitest run --project spec-minimal test/spec/bls/", diff --git a/packages/beacon-node/src/chain/blocks/verifyBlock.ts b/packages/beacon-node/src/chain/blocks/verifyBlock.ts index b7582ab872..e74ce2b759 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlock.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlock.ts @@ -1,4 +1,3 @@ -import {ChainForkConfig} from "@lodestar/config"; import {ExecutionStatus, ProtoBlock} from "@lodestar/fork-choice"; import {ForkName, isForkPostFulu} from "@lodestar/params"; import { @@ -7,8 +6,7 @@ import { computeEpochAtSlot, isStateValidatorsNodesPopulated, } from "@lodestar/state-transition"; -import {IndexedAttestation, bellatrix, deneb} from "@lodestar/types"; -import {Logger, toRootHex} from "@lodestar/utils"; +import {IndexedAttestation, deneb} from "@lodestar/types"; import type {BeaconChain} from "../chain.js"; import {BlockError, BlockErrorCode} from "../errors/index.js"; import {BlockProcessOpts} from "../options.js"; @@ -18,7 +16,6 @@ import {ImportBlockOpts} from "./types.js"; import {DENEB_BLOWFISH_BANNER} from "./utils/blowfishBanner.js"; import {ELECTRA_GIRAFFE_BANNER} from "./utils/giraffeBanner.js"; import {CAPELLA_OWL_BANNER} from "./utils/ownBanner.js"; -import {POS_PANDA_MERGE_TRANSITION_BANNER} from "./utils/pandaMergeTransitionBanner.js"; import {FULU_ZEBRA_BANNER} from "./utils/zebraBanner.js"; import {verifyBlocksDataAvailability} from "./verifyBlocksDataAvailability.js"; import {SegmentExecStatus, verifyBlocksExecutionPayload} from "./verifyBlocksExecutionPayloads.js"; @@ -103,7 +100,6 @@ export async function verifyBlocksInEpoch( : Promise.resolve({ execAborted: null, executionStatuses: blocks.map((_blk) => ExecutionStatus.Syncing), - mergeBlockFound: null, } as SegmentExecStatus); // Store indexed attestations for each block to avoid recomputing them during import @@ -163,12 +159,6 @@ export async function verifyBlocksInEpoch( ]); if (opts.verifyOnly !== true) { - if (segmentExecStatus.execAborted === null && segmentExecStatus.mergeBlockFound !== null) { - // merge block found and is fully valid = state transition + signatures + execution payload. - // TODO: Will this banner be logged during syncing? 
- logOnPowBlock(this.logger, this.config, segmentExecStatus.mergeBlockFound); - } - const fromForkBoundary = this.config.getForkBoundaryAtEpoch(computeEpochAtSlot(parentBlock.slot)); const toForkBoundary = this.config.getForkBoundaryAtEpoch(computeEpochAtSlot(lastBlock.message.slot)); @@ -251,16 +241,3 @@ export async function verifyBlocksInEpoch( abortController.abort(); } } - -function logOnPowBlock(logger: Logger, config: ChainForkConfig, mergeBlock: bellatrix.BeaconBlock): void { - const mergeBlockHash = toRootHex(config.getForkTypes(mergeBlock.slot).BeaconBlock.hashTreeRoot(mergeBlock)); - const mergeExecutionHash = toRootHex(mergeBlock.body.executionPayload.blockHash); - const mergePowHash = toRootHex(mergeBlock.body.executionPayload.parentHash); - logger.info(POS_PANDA_MERGE_TRANSITION_BANNER); - logger.info("Execution transitioning from PoW to PoS!!!"); - logger.info("Importing block referencing terminal PoW block", { - blockHash: mergeBlockHash, - executionHash: mergeExecutionHash, - powHash: mergePowHash, - }); -} diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts index 1183c72074..a74e3f033d 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts @@ -6,19 +6,11 @@ import { LVHValidResponse, MaybeValidExecutionStatus, ProtoBlock, - assertValidTerminalPowBlock, } from "@lodestar/fork-choice"; -import {ForkSeq, SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY} from "@lodestar/params"; -import { - CachedBeaconStateAllForks, - isExecutionBlockBodyType, - isExecutionEnabled, - isExecutionStateType, - isMergeTransitionBlock as isMergeTransitionBlockFn, -} from "@lodestar/state-transition"; -import {Slot, bellatrix, electra} from "@lodestar/types"; +import {ForkSeq} from "@lodestar/params"; +import {CachedBeaconStateAllForks, isExecutionBlockBodyType, isExecutionStateType} from "@lodestar/state-transition"; +import {bellatrix, electra} from "@lodestar/types"; import {ErrorAborted, Logger, toRootHex} from "@lodestar/utils"; -import {IEth1ForBlockProduction} from "../../eth1/index.js"; import {ExecutionPayloadStatus, IExecutionEngine} from "../../execution/engine/interface.js"; import {Metrics} from "../../metrics/metrics.js"; import {IClock} from "../../util/clock.js"; @@ -29,7 +21,6 @@ import {IBlockInput} from "./blockInput/types.js"; import {ImportBlockOpts} from "./types.js"; export type VerifyBlockExecutionPayloadModules = { - eth1: IEth1ForBlockProduction; executionEngine: IExecutionEngine; clock: IClock; logger: Logger; @@ -44,9 +35,8 @@ export type SegmentExecStatus = execAborted: null; executionStatuses: MaybeValidExecutionStatus[]; executionTime: number; - mergeBlockFound: bellatrix.BeaconBlock | null; } - | {execAborted: ExecAbortType; invalidSegmentLVH?: LVHInvalidResponse; mergeBlockFound: null}; + | {execAborted: ExecAbortType; invalidSegmentLVH?: LVHInvalidResponse}; type VerifyExecutionErrorResponse = | {executionStatus: ExecutionStatus.Invalid; lvhResponse: LVHInvalidResponse; execError: BlockError} @@ -72,7 +62,6 @@ export async function verifyBlocksExecutionPayload( opts: BlockProcessOpts & ImportBlockOpts ): Promise { const executionStatuses: MaybeValidExecutionStatus[] = []; - let mergeBlockFound: bellatrix.BeaconBlock | null = null; const recvToValLatency = Date.now() / 1000 - (opts.seenTimestampSec ?? 
Date.now() / 1000); const lastBlock = blockInputs.at(-1); @@ -96,57 +85,9 @@ export async function verifyBlocksExecutionPayload( // will either validate or prune invalid blocks // // We need to track and keep updating if its safe to optimistically import these blocks. - // The following is how we determine for a block if its safe: - // - // (but we need to modify this check for this segment of blocks because it checks if the - // parent of any block imported in forkchoice is post-merge and currently we could only - // have blocks[0]'s parent imported in the chain as this is no longer one by one verify + - // import.) - // // // When to import such blocks: // From: https://github.com/ethereum/consensus-specs/pull/2844 - // A block MUST NOT be optimistically imported, unless either of the following - // conditions are met: - // - // 1. Parent of the block has execution - // - // Since with the sync optimizations, the previous block might not have been in the - // forkChoice yet, so the below check could fail for safeSlotsToImportOptimistically - // - // Luckily, we can depend on the preState0 to see if we are already post merge w.r.t - // the blocks we are importing. - // - // Or in other words if - // - block status is syncing - // - and we are not in a post merge world and is parent is not optimistically safe - // - and we are syncing close to the chain head i.e. clock slot - // - and parent is optimistically safe - // - // then throw error - // - // - // - if we haven't yet imported a post merge ancestor in forkchoice i.e. - // - and we are syncing close to the clockSlot, i.e. merge Transition could be underway - // - // - // 2. The current slot (as per the system clock) is at least - // SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY ahead of the slot of the block being - // imported. - // This means that the merge transition could be underway and we can't afford to import - // a block which is not fully validated as it could affect liveliness of the network. - // - // - // For this segment of blocks: - // We are optimistically safe with respect to this entire block segment if: - // - all the blocks are way behind the current slot - // - or we have already imported a post-merge parent of first block of this chain in forkchoice - const currentSlot = chain.clock.currentSlot; - const safeSlotsToImportOptimistically = opts.safeSlotsToImportOptimistically ?? SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY; - let isOptimisticallySafe = - parentBlock.executionStatus !== ExecutionStatus.PreMerge || - lastBlock.slot + safeSlotsToImportOptimistically < currentSlot; - for (let blockIndex = 0; blockIndex < blockInputs.length; blockIndex++) { const blockInput = blockInputs[blockIndex]; // If blocks are invalid in consensus the main promise could resolve before this loop ends. 
@@ -154,14 +95,7 @@ export async function verifyBlocksExecutionPayload( if (signal.aborted) { throw new ErrorAborted("verifyBlockExecutionPayloads"); } - const verifyResponse = await verifyBlockExecutionPayload( - chain, - blockInput, - preState0, - opts, - isOptimisticallySafe, - currentSlot - ); + const verifyResponse = await verifyBlockExecutionPayload(chain, blockInput, preState0); // If execError has happened, then we need to extract the segmentExecStatus and return if (verifyResponse.execError !== null) { @@ -170,75 +104,7 @@ export async function verifyBlocksExecutionPayload( // If we are here then its because executionStatus is one of MaybeValidExecutionStatus const {executionStatus} = verifyResponse; - // It becomes optimistically safe for following blocks if a post-merge block is deemed fit - // for import. If it would not have been safe verifyBlockExecutionPayload would have - // returned execError and loop would have been aborted - if (executionStatus !== ExecutionStatus.PreMerge) { - isOptimisticallySafe = true; - } executionStatuses.push(executionStatus); - - const blockBody = blockInput.getBlock().message.body; - const isMergeTransitionBlock = - // If the merge block is found, stop the search as the isMergeTransitionBlockFn condition - // will still evaluate to true for the following blocks leading to errors (while syncing) - // as the preState0 still belongs to the pre state of the first block on segment - mergeBlockFound === null && - isExecutionStateType(preState0) && - isExecutionBlockBodyType(blockBody) && - isMergeTransitionBlockFn(preState0, blockBody); - - // If this is a merge transition block, check to ensure if it references - // a valid terminal PoW block. - // - // However specs define this check to be run inside forkChoice's onBlock - // (https://github.com/ethereum/consensus-specs/blob/dev/specs/bellatrix/fork-choice.md#on_block) - // but we perform the check here (as inspired from the lighthouse impl) - // - // Reasons: - // 1. If the block is not valid, we should fail early and not wait till - // forkChoice import. - // 2. It makes logical sense to pair it with the block validations and - // deal it with the external services like eth1 tracker here than - // in import block - if (isMergeTransitionBlock) { - const mergeBlock = blockInput.getBlock().message as bellatrix.BeaconBlock; - const mergeBlockHash = toRootHex(chain.config.getForkTypes(mergeBlock.slot).BeaconBlock.hashTreeRoot(mergeBlock)); - const powBlockRootHex = toRootHex(mergeBlock.body.executionPayload.parentHash); - const powBlock = await chain.eth1.getPowBlock(powBlockRootHex).catch((error) => { - // Lets just warn the user here, errors if any will be reported on - // `assertValidTerminalPowBlock` checks - chain.logger.warn( - "Error fetching terminal PoW block referred in the merge transition block", - {powBlockHash: powBlockRootHex, mergeBlockHash}, - error - ); - return null; - }); - - const powBlockParent = - powBlock && - (await chain.eth1.getPowBlock(powBlock.parentHash).catch((error) => { - // Lets just warn the user here, errors if any will be reported on - // `assertValidTerminalPowBlock` checks - chain.logger.warn( - "Error fetching parent of the terminal PoW block referred in the merge transition block", - {powBlockParentHash: powBlock.parentHash, powBlock: powBlockRootHex, mergeBlockHash}, - error - ); - return null; - })); - - // executionStatus will never == ExecutionStatus.PreMerge if it's the mergeBlock. 
But gotta make TS happy =D - if (executionStatus === ExecutionStatus.PreMerge) { - throw Error("Merge block must not have executionStatus == PreMerge"); - } - - assertValidTerminalPowBlock(chain.config, mergeBlock, {executionStatus, powBlock, powBlockParent}); - // Valid execution payload, but may not be in a valid beacon chain block. Delay printing the POS ACTIVATED banner - // to the end of the verify block routine, which confirms that this block is fully valid. - mergeBlockFound = mergeBlock; - } } const executionTime = Date.now(); @@ -265,7 +131,6 @@ export async function verifyBlocksExecutionPayload( execAborted: null, executionStatuses, executionTime, - mergeBlockFound, }; } @@ -275,28 +140,18 @@ export async function verifyBlocksExecutionPayload( export async function verifyBlockExecutionPayload( chain: VerifyBlockExecutionPayloadModules, blockInput: IBlockInput, - preState0: CachedBeaconStateAllForks, - opts: BlockProcessOpts, - isOptimisticallySafe: boolean, - currentSlot: Slot + preState0: CachedBeaconStateAllForks ): Promise { const block = blockInput.getBlock(); /** Not null if execution is enabled */ const executionPayloadEnabled = - isExecutionStateType(preState0) && - isExecutionBlockBodyType(block.message.body) && - // Safe to use with a state previous to block's preState. isMergeComplete can only transition from false to true. - // - If preState0 is after merge block: condition is true, and will always be true - // - If preState0 is before merge block: the block could lie but then state transition function will throw above - // It is kinda safe to send non-trusted payloads to the execution client because at most it can trigger sync. - // TODO: If this becomes a problem, do some basic verification beforehand, like checking the proposer signature. - isExecutionEnabled(preState0, block.message) + isExecutionStateType(preState0) && isExecutionBlockBodyType(block.message.body) ? block.message.body.executionPayload : null; if (!executionPayloadEnabled) { - // isExecutionEnabled() -> false - return {executionStatus: ExecutionStatus.PreMerge, execError: null} as VerifyBlockExecutionResponse; + // Pre-merge block, no execution payload to verify + return {executionStatus: ExecutionStatus.PreMerge, lvhResponse: undefined, execError: null}; } // TODO: Handle better notifyNewPayload() returning error is syncing @@ -343,24 +198,10 @@ export async function verifyBlockExecutionPayload( } // Accepted and Syncing have the same treatment, as final validation of block is pending + // Post-merge, we're always safe to optimistically import case ExecutionPayloadStatus.ACCEPTED: - case ExecutionPayloadStatus.SYNCING: { - // Check if the entire segment was deemed safe or, this block specifically itself if not in - // the safeSlotsToImportOptimistically window of current slot, then we can import else - // we need to throw and not import his block - const safeSlotsToImportOptimistically = - opts.safeSlotsToImportOptimistically ?? 
SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY; - if (!isOptimisticallySafe && blockInput.slot + safeSlotsToImportOptimistically >= currentSlot) { - const execError = new BlockError(block, { - code: BlockErrorCode.EXECUTION_ENGINE_ERROR, - execStatus: ExecutionPayloadStatus.UNSAFE_OPTIMISTIC_STATUS, - errorMessage: `not safe to import ${execResult.status} payload within ${opts.safeSlotsToImportOptimistically} of currentSlot`, - }); - return {executionStatus: null, execError} as VerifyBlockExecutionResponse; - } - + case ExecutionPayloadStatus.SYNCING: return {executionStatus: ExecutionStatus.Syncing, execError: null}; - } // If the block has is not valid, or it referenced an invalid terminal block then the // block is invalid, however it has no bearing on any forkChoice cleanup diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 33bbf59241..1e3a017c1a 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -3,7 +3,7 @@ import {PrivateKey} from "@libp2p/interface"; import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {CompositeTypeAny, TreeView, Type} from "@chainsafe/ssz"; import {BeaconConfig} from "@lodestar/config"; -import {CheckpointWithHex, ExecutionStatus, IForkChoice, ProtoBlock, UpdateHeadOpt} from "@lodestar/fork-choice"; +import {CheckpointWithHex, IForkChoice, ProtoBlock, UpdateHeadOpt} from "@lodestar/fork-choice"; import {LoggerNode} from "@lodestar/logger/node"; import {EFFECTIVE_BALANCE_INCREMENT, GENESIS_SLOT, SLOTS_PER_EPOCH, isForkPostElectra} from "@lodestar/params"; import { @@ -1177,17 +1177,6 @@ export class BeaconChain implements IBeaconChain { this.seenAggregatedAttestations.prune(epoch); this.seenBlockAttesters.prune(epoch); this.beaconProposerCache.prune(epoch); - - // Poll for merge block in the background to speed-up block production. Only if: - // - after BELLATRIX_FORK_EPOCH - // - Beacon node synced - // - head state not isMergeTransitionComplete - if (this.config.BELLATRIX_FORK_EPOCH - epoch < 1) { - const head = this.forkChoice.getHead(); - if (epoch - computeEpochAtSlot(head.slot) < 5 && head.executionStatus === ExecutionStatus.PreMerge) { - this.eth1.startPollingMergeBlock(); - } - } } protected onNewHead(head: ProtoBlock): void { diff --git a/packages/beacon-node/src/chain/forkChoice/index.ts b/packages/beacon-node/src/chain/forkChoice/index.ts index d83dc9949d..56da740ee9 100644 --- a/packages/beacon-node/src/chain/forkChoice/index.ts +++ b/packages/beacon-node/src/chain/forkChoice/index.ts @@ -18,7 +18,6 @@ import { getBlockRootAtSlot, getEffectiveBalanceIncrementsZeroInactive, isExecutionStateType, - isMergeTransitionComplete, } from "@lodestar/state-transition"; import {Slot, ssz} from "@lodestar/types"; import {Logger, toRootHex} from "@lodestar/utils"; @@ -135,7 +134,7 @@ export function initializeForkChoiceFromFinalizedState( unrealizedFinalizedEpoch: finalizedCheckpoint.epoch, unrealizedFinalizedRoot: toRootHex(finalizedCheckpoint.root), - ...(isExecutionStateType(state) && isMergeTransitionComplete(state) + ...(isExecutionStateType(state) ? 
{ executionPayloadBlockHash: toRootHex(state.latestExecutionPayloadHeader.blockHash), executionPayloadNumber: state.latestExecutionPayloadHeader.blockNumber, @@ -216,7 +215,7 @@ export function initializeForkChoiceFromUnfinalizedState( unrealizedFinalizedEpoch: finalizedCheckpoint.epoch, unrealizedFinalizedRoot: toRootHex(finalizedCheckpoint.root), - ...(isExecutionStateType(unfinalizedState) && isMergeTransitionComplete(unfinalizedState) + ...(isExecutionStateType(unfinalizedState) ? { executionPayloadBlockHash: toRootHex(unfinalizedState.latestExecutionPayloadHeader.blockHash), executionPayloadNumber: unfinalizedState.latestExecutionPayloadHeader.blockNumber, diff --git a/packages/beacon-node/src/chain/options.ts b/packages/beacon-node/src/chain/options.ts index 6a7ceadf7a..5d95927232 100644 --- a/packages/beacon-node/src/chain/options.ts +++ b/packages/beacon-node/src/chain/options.ts @@ -1,4 +1,3 @@ -import {SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY} from "@lodestar/params"; import {defaultOptions as defaultValidatorOptions} from "@lodestar/validator"; import {DEFAULT_ARCHIVE_MODE} from "./archiveStore/constants.js"; import {ArchiveMode, ArchiveStoreOpts} from "./archiveStore/interface.js"; @@ -56,10 +55,6 @@ export type BlockProcessOpts = { * Will double processing times. Use only for debugging purposes. */ disableBlsBatchVerify?: boolean; - /** - * Override SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY - */ - safeSlotsToImportOptimistically?: number; /** * Assert progressive balances the same to EpochTransitionCache */ @@ -109,7 +104,6 @@ export const defaultChainOptions: IChainOptions = { proposerBoost: true, proposerBoostReorg: true, computeUnrealized: true, - safeSlotsToImportOptimistically: SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY, suggestedFeeRecipient: defaultValidatorOptions.suggestedFeeRecipient, serveHistoricalState: false, assertCorrectProgressiveBalances: false, diff --git a/packages/beacon-node/src/chain/prepareNextSlot.ts b/packages/beacon-node/src/chain/prepareNextSlot.ts index 43fbb0226a..78967f2ff2 100644 --- a/packages/beacon-node/src/chain/prepareNextSlot.ts +++ b/packages/beacon-node/src/chain/prepareNextSlot.ts @@ -197,7 +197,7 @@ export class PrepareNextSlotScheduler { this.chain.opts.emitPayloadAttributes === true && this.chain.emitter.listenerCount(routes.events.EventType.payloadAttributes) ) { - const data = await getPayloadAttributesForSSE(fork as ForkPostBellatrix, this.chain, { + const data = getPayloadAttributesForSSE(fork as ForkPostBellatrix, this.chain, { prepareState: updatedPrepareState, prepareSlot, parentBlockRoot: fromHex(headRoot), diff --git a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts index 3736a14129..556e4a42f7 100644 --- a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts +++ b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts @@ -17,10 +17,8 @@ import { CachedBeaconStateCapella, CachedBeaconStateExecutions, computeTimeAtSlot, - getCurrentEpoch, getExpectedWithdrawals, getRandaoMix, - isMergeTransitionComplete, } from "@lodestar/state-transition"; import { BLSPubkey, @@ -44,12 +42,9 @@ import { deneb, electra, fulu, - ssz, - sszTypesFor, } from "@lodestar/types"; import {Logger, sleep, toHex, toPubkeyHex, toRootHex} from "@lodestar/utils"; -import {ZERO_HASH, ZERO_HASH_HEX} from "../../constants/index.js"; -import {IEth1ForBlockProduction} from "../../eth1/index.js"; +import {ZERO_HASH_HEX} from "../../constants/index.js"; import {numToQuantity} from 
"../../eth1/provider/utils.js"; import { IExecutionBuilder, @@ -337,14 +332,6 @@ export async function produceBlockBody( feeRecipient ); - if (prepareRes.isPremerge) { - return { - ...prepareRes, - executionPayload: sszTypesFor(fork).ExecutionPayload.defaultValue(), - executionPayloadValue: BigInt(0), - }; - } - const {prepType, payloadId} = prepareRes; Object.assign(logMeta, {executionPayloadPrepType: prepType}); @@ -366,37 +353,14 @@ export async function produceBlockBody( return {...prepareRes, ...payloadRes}; })().catch((e) => { - // catch payload fetch here, because there is still a recovery path possible if we - // are pre-merge. We don't care the same for builder segment as the execution block - // will takeover if the builder flow was activated and errors this.metrics?.blockPayload.payloadFetchErrors.inc(); - - if (!isMergeTransitionComplete(currentState as CachedBeaconStateBellatrix)) { - this.logger?.warn( - "Fetch payload from the execution failed, however since we are still pre-merge proceeding with an empty one.", - {}, - e as Error - ); - // ok we don't have an execution payload here, so we can assign an empty one - // if pre-merge - return { - isPremerge: true as const, - executionPayload: sszTypesFor(fork).ExecutionPayload.defaultValue(), - executionPayloadValue: BigInt(0), - }; - } - // since merge transition is complete, we need a valid payload even if with an - // empty (transactions) one. defaultValue isn't gonna cut it! throw e; }); const [engineRes, commonBlockBody] = await Promise.all([enginePromise, commonBlockBodyPromise]); blockBody = Object.assign({}, commonBlockBody) as AssembledBodyType; - if (engineRes.isPremerge) { - (blockBody as BeaconBlockBody).executionPayload = engineRes.executionPayload; - executionPayloadValue = engineRes.executionPayloadValue; - } else { + { const {prepType, payloadId, executionPayload, blobsBundle, executionRequests} = engineRes; shouldOverrideBuilder = engineRes.shouldOverrideBuilder; @@ -504,15 +468,10 @@ export async function produceBlockBody( } /** - * Produce ExecutionPayload for pre-merge, merge, and post-merge. - * - * Expects `eth1MergeBlockFinder` to be actively searching for blocks well in advance to being called. - * - * @returns PayloadId = pow block found, null = pow NOT found + * Produce ExecutionPayload for post-merge. 
*/ export async function prepareExecutionPayload( chain: { - eth1: IEth1ForBlockProduction; executionEngine: IExecutionEngine; config: ChainForkConfig; }, @@ -523,14 +482,8 @@ export async function prepareExecutionPayload( finalizedBlockHash: RootHex, state: CachedBeaconStateExecutions, suggestedFeeRecipient: string -): Promise<{isPremerge: true} | {isPremerge: false; prepType: PayloadPreparationType; payloadId: PayloadId}> { - const parentHashRes = await getExecutionPayloadParentHash(chain, state); - if (parentHashRes.isPremerge) { - // Return null only if the execution is pre-merge - return {isPremerge: true}; - } - - const {parentHash} = parentHashRes; +): Promise<{prepType: PayloadPreparationType; payloadId: PayloadId}> { + const parentHash = state.latestExecutionPayloadHeader.blockHash; const timestamp = computeTimeAtSlot(chain.config, state.slot, state.genesisTime); const prevRandao = getRandaoMix(state, state.epochCtx.epoch); @@ -586,12 +539,11 @@ export async function prepareExecutionPayload( // We are only returning payloadId here because prepareExecutionPayload is also called from // prepareNextSlot, which is an advance call to execution engine to start building payload // Actual payload isn't produced till getPayload is called. - return {isPremerge: false, payloadId, prepType}; + return {payloadId, prepType}; } async function prepareExecutionPayloadHeader( chain: { - eth1: IEth1ForBlockProduction; executionBuilder?: IExecutionBuilder; config: ChainForkConfig; }, @@ -608,53 +560,13 @@ async function prepareExecutionPayloadHeader( throw Error("executionBuilder required"); } - const parentHashRes = await getExecutionPayloadParentHash(chain, state); - if (parentHashRes.isPremerge) { - throw Error("External builder disabled pre-merge"); - } - - const {parentHash} = parentHashRes; + const parentHash = state.latestExecutionPayloadHeader.blockHash; return chain.executionBuilder.getHeader(fork, state.slot, parentHash, proposerPubKey); } -export async function getExecutionPayloadParentHash( - chain: { - eth1: IEth1ForBlockProduction; - config: ChainForkConfig; - }, - state: CachedBeaconStateExecutions -): Promise<{isPremerge: true} | {isPremerge: false; parentHash: Root}> { - // Use different POW block hash parent for block production based on merge status. 
- // Returned value of null == using an empty ExecutionPayload value - if (isMergeTransitionComplete(state)) { - // Post-merge, normal payload - return {isPremerge: false, parentHash: state.latestExecutionPayloadHeader.blockHash}; - } - - if ( - !ssz.Root.equals(chain.config.TERMINAL_BLOCK_HASH, ZERO_HASH) && - getCurrentEpoch(state) < chain.config.TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH - ) { - throw new Error( - `InvalidMergeTBH epoch: expected >= ${ - chain.config.TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH - }, actual: ${getCurrentEpoch(state)}` - ); - } - - const terminalPowBlockHash = await chain.eth1.getTerminalPowBlock(); - if (terminalPowBlockHash === null) { - // Pre-merge, no prepare payload call is needed - return {isPremerge: true}; - } - // Signify merge via producing on top of the last PoW block - return {isPremerge: false, parentHash: terminalPowBlockHash}; -} - -export async function getPayloadAttributesForSSE( +export function getPayloadAttributesForSSE( fork: ForkPostBellatrix, chain: { - eth1: IEth1ForBlockProduction; config: ChainForkConfig; }, { @@ -663,30 +575,23 @@ export async function getPayloadAttributesForSSE( parentBlockRoot, feeRecipient, }: {prepareState: CachedBeaconStateExecutions; prepareSlot: Slot; parentBlockRoot: Root; feeRecipient: string} -): Promise { - const parentHashRes = await getExecutionPayloadParentHash(chain, prepareState); - - if (!parentHashRes.isPremerge) { - const {parentHash} = parentHashRes; - const payloadAttributes = preparePayloadAttributes(fork, chain, { - prepareState, - prepareSlot, - parentBlockRoot, - feeRecipient, - }); - - const ssePayloadAttributes: SSEPayloadAttributes = { - proposerIndex: prepareState.epochCtx.getBeaconProposer(prepareSlot), - proposalSlot: prepareSlot, - parentBlockNumber: prepareState.latestExecutionPayloadHeader.blockNumber, - parentBlockRoot, - parentBlockHash: parentHash, - payloadAttributes, - }; - return ssePayloadAttributes; - } - - throw Error("The execution is still pre-merge"); +): SSEPayloadAttributes { + const parentHash = prepareState.latestExecutionPayloadHeader.blockHash; + const payloadAttributes = preparePayloadAttributes(fork, chain, { + prepareState, + prepareSlot, + parentBlockRoot, + feeRecipient, + }); + const ssePayloadAttributes: SSEPayloadAttributes = { + proposerIndex: prepareState.epochCtx.getBeaconProposer(prepareSlot), + proposalSlot: prepareSlot, + parentBlockNumber: prepareState.latestExecutionPayloadHeader.blockNumber, + parentBlockRoot, + parentBlockHash: parentHash, + payloadAttributes, + }; + return ssePayloadAttributes; } function preparePayloadAttributes( diff --git a/packages/beacon-node/src/chain/validation/block.ts b/packages/beacon-node/src/chain/validation/block.ts index 974b2c1e07..b68f30d6b0 100644 --- a/packages/beacon-node/src/chain/validation/block.ts +++ b/packages/beacon-node/src/chain/validation/block.ts @@ -6,7 +6,6 @@ import { computeTimeAtSlot, getBlockProposerSignatureSet, isExecutionBlockBodyType, - isExecutionEnabled, isExecutionStateType, } from "@lodestar/state-transition"; import {SignedBeaconBlock, deneb} from "@lodestar/types"; @@ -140,7 +139,7 @@ export async function validateGossipBlock( if (fork === ForkName.bellatrix) { if (!isExecutionBlockBodyType(block.body)) throw Error("Not merge block type"); const executionPayload = block.body.executionPayload; - if (isExecutionStateType(blockState) && isExecutionEnabled(blockState, block)) { + if (isExecutionStateType(blockState)) { const expectedTimestamp = computeTimeAtSlot(config, blockSlot, 
chain.genesisTime); if (executionPayload.timestamp !== computeTimeAtSlot(config, blockSlot, chain.genesisTime)) { throw new BlockGossipError(GossipAction.REJECT, { diff --git a/packages/beacon-node/src/eth1/eth1MergeBlockTracker.ts b/packages/beacon-node/src/eth1/eth1MergeBlockTracker.ts deleted file mode 100644 index ea2d134fe4..0000000000 --- a/packages/beacon-node/src/eth1/eth1MergeBlockTracker.ts +++ /dev/null @@ -1,328 +0,0 @@ -import {ChainConfig} from "@lodestar/config"; -import {RootHex} from "@lodestar/types"; -import {Logger, pruneSetToMax, toRootHex} from "@lodestar/utils"; -import {ZERO_HASH_HEX} from "../constants/index.js"; -import {Metrics} from "../metrics/index.js"; -import {enumToIndexMap} from "../util/enum.js"; -import {EthJsonRpcBlockRaw, IEth1Provider, PowMergeBlock, PowMergeBlockTimestamp, TDProgress} from "./interface.js"; -import {dataToRootHex, quantityToBigint, quantityToNum} from "./provider/utils.js"; - -export enum StatusCode { - STOPPED = "STOPPED", - SEARCHING = "SEARCHING", - FOUND = "FOUND", -} - -type Status = - | {code: StatusCode.STOPPED} - | {code: StatusCode.SEARCHING} - | {code: StatusCode.FOUND; mergeBlock: PowMergeBlock}; - -/** For metrics, index order = declaration order of StatusCode */ -const statusCodeIdx = enumToIndexMap(StatusCode); - -/** - * Bounds `blocksByHashCache` cache, imposing a max distance between highest and lowest block numbers. - * In case of extreme forking the cache might grow unbounded. - */ -const MAX_CACHE_POW_BLOCKS = 1024; - -const MAX_TD_RENDER_VALUE = Number.MAX_SAFE_INTEGER; - -export type Eth1MergeBlockTrackerModules = { - config: ChainConfig; - logger: Logger; - signal: AbortSignal; - metrics: Metrics | null; -}; - -// get_pow_block_at_total_difficulty - -/** - * Follows the eth1 chain to find a (or multiple?) merge blocks that cross the threshold of total terminal difficulty - * - * Finding the mergeBlock could be done in demand when proposing pre-merge blocks. However, that would slow block - * production during the weeks between BELLATRIX_EPOCH and TTD. 
- */ -export class Eth1MergeBlockTracker { - private readonly config: ChainConfig; - private readonly logger: Logger; - private readonly metrics: Metrics | null; - - private readonly blocksByHashCache = new Map(); - private readonly intervals: NodeJS.Timeout[] = []; - - private status: Status; - private latestEth1Block: PowMergeBlockTimestamp | null = null; - private getTerminalPowBlockFromEth1Promise: Promise | null = null; - private readonly safeTDFactor: bigint; - - constructor( - {config, logger, signal, metrics}: Eth1MergeBlockTrackerModules, - private readonly eth1Provider: IEth1Provider - ) { - this.config = config; - this.logger = logger; - this.metrics = metrics; - - this.status = {code: StatusCode.STOPPED}; - - signal.addEventListener("abort", () => this.close(), {once: true}); - - this.safeTDFactor = getSafeTDFactor(this.config.TERMINAL_TOTAL_DIFFICULTY); - const scaledTTD = this.config.TERMINAL_TOTAL_DIFFICULTY / this.safeTDFactor; - - // Only run metrics if necessary - if (metrics) { - // TTD can't be dynamically changed during execution, register metric once - metrics.eth1.eth1MergeTTD.set(Number(scaledTTD as bigint)); - metrics.eth1.eth1MergeTDFactor.set(Number(this.safeTDFactor as bigint)); - - metrics.eth1.eth1MergeStatus.addCollect(() => { - // Set merge ttd, merge status and merge block status - metrics.eth1.eth1MergeStatus.set(statusCodeIdx[this.status.code]); - - if (this.latestEth1Block !== null) { - // Set latestBlock stats - metrics.eth1.eth1LatestBlockNumber.set(this.latestEth1Block.number); - metrics.eth1.eth1LatestBlockTD.set(Number(this.latestEth1Block.totalDifficulty / this.safeTDFactor)); - metrics.eth1.eth1LatestBlockTimestamp.set(this.latestEth1Block.timestamp); - } - }); - } - } - - /** - * Returns the most recent POW block that satisfies the merge block condition - */ - async getTerminalPowBlock(): Promise { - switch (this.status.code) { - case StatusCode.STOPPED: - // If not module is not polling fetch the mergeBlock explicitly - return this.getTerminalPowBlockFromEth1(); - - case StatusCode.SEARCHING: - // Assume that polling would have found the block - return null; - - case StatusCode.FOUND: - return this.status.mergeBlock; - } - } - - getTDProgress(): TDProgress | null { - if (this.latestEth1Block === null) { - return this.latestEth1Block; - } - - const tdDiff = this.config.TERMINAL_TOTAL_DIFFICULTY - this.latestEth1Block.totalDifficulty; - - if (tdDiff > BigInt(0)) { - return { - ttdHit: false, - tdFactor: this.safeTDFactor, - tdDiffScaled: Number((tdDiff / this.safeTDFactor) as bigint), - ttd: this.config.TERMINAL_TOTAL_DIFFICULTY, - td: this.latestEth1Block.totalDifficulty, - timestamp: this.latestEth1Block.timestamp, - }; - } - return { - ttdHit: true, - }; - } - - /** - * Get a POW block by hash checking the local cache first - */ - async getPowBlock(powBlockHash: string): Promise { - // Check cache first - const cachedBlock = this.blocksByHashCache.get(powBlockHash); - if (cachedBlock) { - return cachedBlock; - } - - // Fetch from node - const blockRaw = await this.eth1Provider.getBlockByHash(powBlockHash); - if (blockRaw) { - const block = toPowBlock(blockRaw); - this.cacheBlock(block); - return block; - } - - return null; - } - - /** - * Should only start polling for mergeBlock if: - * - after BELLATRIX_FORK_EPOCH - * - Beacon node synced - * - head state not isMergeTransitionComplete - */ - startPollingMergeBlock(): void { - if (this.status.code !== StatusCode.STOPPED) { - return; - } - - this.status = {code: StatusCode.SEARCHING}; - 
this.logger.info("Starting search for terminal POW block", { - TERMINAL_TOTAL_DIFFICULTY: this.config.TERMINAL_TOTAL_DIFFICULTY, - }); - - const interval = setInterval(() => { - // Preemptively try to find merge block and cache it if found. - // Future callers of getTerminalPowBlock() will re-use the cached found mergeBlock. - this.getTerminalPowBlockFromEth1().catch((e) => { - this.logger.error("Error on findMergeBlock", {}, e as Error); - this.metrics?.eth1.eth1PollMergeBlockErrors.inc(); - }); - }, this.config.SECONDS_PER_ETH1_BLOCK * 1000); - - this.intervals.push(interval); - } - - private close(): void { - this.intervals.forEach(clearInterval); - } - - private async getTerminalPowBlockFromEth1(): Promise { - if (!this.getTerminalPowBlockFromEth1Promise) { - this.getTerminalPowBlockFromEth1Promise = this.internalGetTerminalPowBlockFromEth1() - .then((mergeBlock) => { - // Persist found merge block here to affect both caller paths: - // - internal searcher - // - external caller if STOPPED - if (mergeBlock && this.status.code !== StatusCode.FOUND) { - if (this.status.code === StatusCode.SEARCHING) { - this.close(); - } - - this.logger.info("Terminal POW block found!", { - hash: mergeBlock.blockHash, - number: mergeBlock.number, - totalDifficulty: mergeBlock.totalDifficulty, - }); - - this.status = {code: StatusCode.FOUND, mergeBlock}; - this.metrics?.eth1.eth1MergeBlockDetails.set( - { - terminalBlockHash: mergeBlock.blockHash, - // Convert all number/bigints to string labels - terminalBlockNumber: mergeBlock.number.toString(10), - terminalBlockTD: mergeBlock.totalDifficulty.toString(10), - }, - 1 - ); - } - - return mergeBlock; - }) - .finally(() => { - this.getTerminalPowBlockFromEth1Promise = null; - }); - } else { - // This should no happen, since getTerminalPowBlockFromEth1() should resolve faster than SECONDS_PER_ETH1_BLOCK. - // else something is wrong: the el-cl comms are two slow, or the backsearch got stuck in a deep search. - this.metrics?.eth1.getTerminalPowBlockPromiseCacheHit.inc(); - } - - return this.getTerminalPowBlockFromEth1Promise; - } - - /** - * **internal** + **unsafe** since it can create multiple backward searches that overload the eth1 client. - * Must be called in a wrapper to ensure that there's only once concurrent call to this fn. - */ - private async internalGetTerminalPowBlockFromEth1(): Promise { - // Search merge block by hash - // Terminal block hash override takes precedence over terminal total difficulty - const terminalBlockHash = toRootHex(this.config.TERMINAL_BLOCK_HASH); - if (terminalBlockHash !== ZERO_HASH_HEX) { - const block = await this.getPowBlock(terminalBlockHash); - if (block) { - return block; - } - // if a TERMINAL_BLOCK_HASH other than ZERO_HASH is configured and we can't find it, return NONE - return null; - } - - // Search merge block by TTD - const latestBlockRaw = await this.eth1Provider.getBlockByNumber("latest"); - if (!latestBlockRaw) { - throw Error("getBlockByNumber('latest') returned null"); - } - - let block = toPowBlock(latestBlockRaw); - this.latestEth1Block = {...block, timestamp: quantityToNum(latestBlockRaw.timestamp)}; - this.cacheBlock(block); - - // This code path to look backwards for the merge block is only necessary if: - // - The network has not yet found the merge block - // - There are descendants of the merge block in the eth1 chain - // For the search below to require more than a few hops, multiple block proposers in a row must fail to detect - // an existing merge block. 
Such situation is extremely unlikely, so this search is left un-optimized. Since - // this class can start eagerly looking for the merge block when not necessary, startPollingMergeBlock() should - // only be called when there is certainty that a mergeBlock search is necessary. - - while (true) { - if (block.totalDifficulty < this.config.TERMINAL_TOTAL_DIFFICULTY) { - // TTD not reached yet - return null; - } - - // else block.totalDifficulty >= this.config.TERMINAL_TOTAL_DIFFICULTY - // Potential mergeBlock! Must find the first block that passes TTD - - // Allow genesis block to reach TTD https://github.com/ethereum/consensus-specs/pull/2719 - if (block.parentHash === ZERO_HASH_HEX) { - return block; - } - - const parent = await this.getPowBlock(block.parentHash); - if (!parent) { - throw Error(`Unknown parent of block with TD>TTD ${block.parentHash}`); - } - - this.metrics?.eth1.eth1ParentBlocksFetched.inc(); - - // block.td > TTD && parent.td < TTD => block is mergeBlock - if (parent.totalDifficulty < this.config.TERMINAL_TOTAL_DIFFICULTY) { - // Is terminal total difficulty block AND has verified block -> parent relationship - return block; - } - block = parent; - } - } - - private cacheBlock(block: PowMergeBlock): void { - this.blocksByHashCache.set(block.blockHash, block); - pruneSetToMax(this.blocksByHashCache, MAX_CACHE_POW_BLOCKS); - } -} - -export function toPowBlock(block: EthJsonRpcBlockRaw): PowMergeBlock { - // Validate untrusted data from API - return { - number: quantityToNum(block.number), - blockHash: dataToRootHex(block.hash), - parentHash: dataToRootHex(block.parentHash), - totalDifficulty: quantityToBigint(block.totalDifficulty), - }; -} - -/** - * TTD values can be very large, for xDAI > 1e45. So scale down. - * To be good, TTD should be rendered as a number < Number.MAX_TD_RENDER_VALUE ~= 9e15 - */ -export function getSafeTDFactor(ttd: bigint): bigint { - const safeIntegerMult = ttd / BigInt(MAX_TD_RENDER_VALUE); - - // TTD < MAX_TD_RENDER_VALUE, no need to scale down - if (safeIntegerMult === BigInt(0)) { - return BigInt(1); - } - - // Return closest power of 10 to ensure TD < max - const safeIntegerMultDigits = safeIntegerMult.toString(10).length; - return BigInt(10) ** BigInt(safeIntegerMultDigits); -} diff --git a/packages/beacon-node/src/eth1/index.ts b/packages/beacon-node/src/eth1/index.ts index 81b2ab6d7b..02dffb3d4a 100644 --- a/packages/beacon-node/src/eth1/index.ts +++ b/packages/beacon-node/src/eth1/index.ts @@ -1,9 +1,6 @@ import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; -import {Root} from "@lodestar/types"; -import {fromHex} from "@lodestar/utils"; import {Eth1DepositDataTracker, Eth1DepositDataTrackerModules} from "./eth1DepositDataTracker.js"; -import {Eth1MergeBlockTracker, Eth1MergeBlockTrackerModules} from "./eth1MergeBlockTracker.js"; -import {Eth1DataAndDeposits, IEth1ForBlockProduction, IEth1Provider, PowMergeBlock, TDProgress} from "./interface.js"; +import {Eth1DataAndDeposits, IEth1ForBlockProduction, IEth1Provider} from "./interface.js"; import {Eth1Options} from "./options.js"; import {Eth1Provider} from "./provider/eth1Provider.js"; export {Eth1Provider}; @@ -23,23 +20,6 @@ export type {IEth1ForBlockProduction, IEth1Provider}; // // - Fetch ALL deposit events from the deposit contract to build the deposit tree and validate future merkle proofs. // Then it must follow deposit events at a distance roughly similar to the `ETH1_FOLLOW_DISTANCE` parameter above. 
-// -// - [New bellatrix]: After BELLATRIX_FORK_EPOCH, it must fetch the block with hash -// `state.eth1_data.block_hash` to compute `terminal_total_difficulty`. Note this may change with -// https://github.com/ethereum/consensus-specs/issues/2603. -// -// - [New bellatrix]: On block production post BELLATRIX_FORK_EPOCH, pre merge, the beacon node must find the merge block -// crossing the `terminal_total_difficulty` boundary and include it in the block. After the merge block production -// will just use `execution_engine.assemble_block` without fetching individual blocks. -// -// - [New bellatrix]: Fork-choice must validate the merge block ensuring it crossed the `terminal_total_difficulty` -// boundary, so it must fetch the POW block referenced in the merge block + its POW parent block. -// -// With the merge the beacon node has to follow the eth1 chain at two distances: -// 1. At `ETH1_FOLLOW_DISTANCE` for eth1Data to be re-org safe -// 2. At the head to get the first merge block, tolerating possible re-orgs -// -// Then both streams of blocks should not be merged since it's harder to guard against re-orgs from (2) to (1). export function initializeEth1ForBlockProduction( opts: Eth1Options, @@ -59,12 +39,8 @@ export function initializeEth1ForBlockProduction( export class Eth1ForBlockProduction implements IEth1ForBlockProduction { private readonly eth1DepositDataTracker: Eth1DepositDataTracker | null; - private readonly eth1MergeBlockTracker: Eth1MergeBlockTracker; - constructor( - opts: Eth1Options, - modules: Eth1DepositDataTrackerModules & Eth1MergeBlockTrackerModules & {eth1Provider?: IEth1Provider} - ) { + constructor(opts: Eth1Options, modules: Eth1DepositDataTrackerModules & {eth1Provider?: IEth1Provider}) { const eth1Provider = modules.eth1Provider || new Eth1Provider( @@ -77,8 +53,6 @@ export class Eth1ForBlockProduction implements IEth1ForBlockProduction { this.eth1DepositDataTracker = opts.disableEth1DepositDataTracker ? null : new Eth1DepositDataTracker(opts, modules, eth1Provider); - - this.eth1MergeBlockTracker = new Eth1MergeBlockTracker(modules, eth1Provider); } async getEth1DataAndDeposits(state: CachedBeaconStateAllForks): Promise { @@ -88,23 +62,6 @@ export class Eth1ForBlockProduction implements IEth1ForBlockProduction { return this.eth1DepositDataTracker.getEth1DataAndDeposits(state); } - async getTerminalPowBlock(): Promise { - const block = await this.eth1MergeBlockTracker.getTerminalPowBlock(); - return block && fromHex(block.blockHash); - } - - getPowBlock(powBlockHash: string): Promise { - return this.eth1MergeBlockTracker.getPowBlock(powBlockHash); - } - - getTDProgress(): TDProgress | null { - return this.eth1MergeBlockTracker.getTDProgress(); - } - - startPollingMergeBlock(): void { - this.eth1MergeBlockTracker.startPollingMergeBlock(); - } - isPollingEth1Data(): boolean { return this.eth1DepositDataTracker?.isPollingEth1Data() ?? 
false; } @@ -127,30 +84,10 @@ export class Eth1ForBlockProductionDisabled implements IEth1ForBlockProduction { return {eth1Data: state.eth1Data, deposits: []}; } - /** - * Will miss the oportunity to propose the merge block but will still produce valid blocks - */ - async getTerminalPowBlock(): Promise { - return null; - } - - /** Will not be able to validate the merge block */ - async getPowBlock(_powBlockHash: string): Promise { - throw Error("eth1 must be enabled to verify merge block"); - } - - getTDProgress(): TDProgress | null { - return null; - } - isPollingEth1Data(): boolean { return false; } - startPollingMergeBlock(): void { - // Ignore - } - stopPollingEth1Data(): void { // Ignore } diff --git a/packages/beacon-node/src/eth1/interface.ts b/packages/beacon-node/src/eth1/interface.ts index c247e30a69..eecec8fc77 100644 --- a/packages/beacon-node/src/eth1/interface.ts +++ b/packages/beacon-node/src/eth1/interface.ts @@ -1,6 +1,6 @@ import {BeaconConfig} from "@lodestar/config"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; -import {Root, RootHex, phase0} from "@lodestar/types"; +import {phase0} from "@lodestar/types"; export type EthJsonRpcBlockRaw = { /** the block number. null when its pending block. `"0x1b4"` */ @@ -47,22 +47,6 @@ export type Eth1DataAndDeposits = { export interface IEth1ForBlockProduction { getEth1DataAndDeposits(state: CachedBeaconStateAllForks): Promise; - /** Returns the most recent POW block that satisfies the merge block condition */ - getTerminalPowBlock(): Promise; - /** Get a POW block by hash checking the local cache first */ - getPowBlock(powBlockHash: string): Promise; - - /** Get current TD progress for log notifier */ - getTDProgress(): TDProgress | null; - - /** - * Should only start polling for mergeBlock if: - * - after BELLATRIX_FORK_EPOCH - * - Beacon node synced - * - head state not isMergeTransitionComplete - */ - startPollingMergeBlock(): void; - isPollingEth1Data(): boolean; /** @@ -78,34 +62,6 @@ export type Eth1Block = { timestamp: number; }; -export type PowMergeBlock = { - number: number; - blockHash: RootHex; - parentHash: RootHex; - totalDifficulty: bigint; -}; - -export type PowMergeBlockTimestamp = PowMergeBlock & { - /** in seconds */ - timestamp: number; -}; - -export type TDProgress = - | { - ttdHit: false; - /** Power of ten by which tdDiffScaled is scaled down */ - tdFactor: bigint; - /** (TERMINAL_TOTAL_DIFFICULTY - block.totalDifficulty) / tdFactor */ - tdDiffScaled: number; - /** TERMINAL_TOTAL_DIFFICULTY */ - ttd: bigint; - /** totalDifficulty of latest fetched eth1 block */ - td: bigint; - /** timestamp in sec of latest fetched eth1 block */ - timestamp: number; - } - | {ttdHit: true}; - export type BatchDepositEvents = { depositEvents: phase0.DepositEvent[]; blockNumber: number; diff --git a/packages/beacon-node/src/execution/engine/http.ts b/packages/beacon-node/src/execution/engine/http.ts index e4e69f9d5f..32739eba4c 100644 --- a/packages/beacon-node/src/execution/engine/http.ts +++ b/packages/beacon-node/src/execution/engine/http.ts @@ -194,15 +194,12 @@ export class ExecutionEngineHttp implements IExecutionEngine { * 1. {status: INVALID_BLOCK_HASH, latestValidHash: null, validationError: * errorMessage | null} if the blockHash validation has failed * - * 2. {status: INVALID_TERMINAL_BLOCK, latestValidHash: null, validationError: - * errorMessage | null} if terminal block conditions are not satisfied - * - * 3. 
{status: SYNCING, latestValidHash: null, validationError: null} if the payload + * 2. {status: SYNCING, latestValidHash: null, validationError: null} if the payload * extends the canonical chain and requisite data for its validation is missing * with the payload status obtained from the Payload validation process if the payload * has been fully validated while processing the call * - * 4. {status: ACCEPTED, latestValidHash: null, validationError: null} if the + * 3. {status: ACCEPTED, latestValidHash: null, validationError: null} if the * following conditions are met: * i) the blockHash of the payload is valid * ii) the payload doesn't extend the canonical chain @@ -330,16 +327,11 @@ export class ExecutionEngineHttp implements IExecutionEngine { * errorMessage | null}, payloadId: null} * obtained from the Payload validation process if the payload is deemed INVALID * - * 3. {payloadStatus: {status: INVALID_TERMINAL_BLOCK, latestValidHash: null, - * validationError: errorMessage | null}, payloadId: null} - * either obtained from the Payload validation process or as a result of validating a - * PoW block referenced by forkchoiceState.headBlockHash - * - * 4. {payloadStatus: {status: VALID, latestValidHash: forkchoiceState.headBlockHash, + * 3. {payloadStatus: {status: VALID, latestValidHash: forkchoiceState.headBlockHash, * validationError: null}, payloadId: null} * if the payload is deemed VALID and a build process hasn't been started * - * 5. {payloadStatus: {status: VALID, latestValidHash: forkchoiceState.headBlockHash, + * 4. {payloadStatus: {status: VALID, latestValidHash: forkchoiceState.headBlockHash, * validationError: null}, payloadId: buildProcessId} * if the payload is deemed VALID and the build process has begun. * diff --git a/packages/beacon-node/src/execution/engine/mock.ts b/packages/beacon-node/src/execution/engine/mock.ts index d28632d07a..b6d30dece5 100644 --- a/packages/beacon-node/src/execution/engine/mock.ts +++ b/packages/beacon-node/src/execution/engine/mock.ts @@ -9,7 +9,7 @@ import { ForkSeq, } from "@lodestar/params"; import {ExecutionPayload, RootHex, bellatrix, deneb, ssz} from "@lodestar/types"; -import {fromHex, toHex, toRootHex} from "@lodestar/utils"; +import {fromHex, toRootHex} from "@lodestar/utils"; import {ZERO_HASH_HEX} from "../../constants/index.js"; import {quantityToNum} from "../../eth1/provider/utils.js"; import {INTEROP_BLOCK_HASH} from "../../node/utils/interop/state.js"; @@ -70,7 +70,7 @@ export class ExecutionEngineMockBackend implements JsonRpcBackend { finalizedBlockHash = ZERO_HASH_HEX; readonly payloadIdCache = new PayloadIdCache(); - /** Known valid blocks, both pre-merge and post-merge */ + /** Known valid blocks */ private readonly validBlocks = new Map(); /** Preparing payloads to be retrieved via engine_getPayloadV1 */ private readonly preparingPayloads = new Map(); @@ -135,18 +135,6 @@ export class ExecutionEngineMockBackend implements JsonRpcBackend { return [] as ExecutionPayloadBodyRpc[]; } - /** - * Mock manipulator to add more known blocks to this mock. - */ - addPowBlock(powBlock: bellatrix.PowBlock): void { - this.validBlocks.set(toHex(powBlock.blockHash), { - parentHash: toHex(powBlock.parentHash), - blockHash: toHex(powBlock.blockHash), - timestamp: 0, - blockNumber: 0, - }); - } - /** * Mock manipulator to add predefined responses before execution engine client calls */ @@ -258,7 +246,7 @@ export class ExecutionEngineMockBackend implements JsonRpcBackend { // section of the EIP. 
Additionally, if this validation fails, client software MUST NOT update the forkchoice // state and MUST NOT begin a payload build process. // - // > TODO + // > N/A: All networks have completed the merge transition // 4. Before updating the forkchoice state, client software MUST ensure the validity of the payload referenced by // forkchoiceState.headBlockHash, and MAY validate the payload while processing the call. The validation process diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index 721b6ffbcb..7e352e318e 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -1668,58 +1668,6 @@ export function createLodestarMetrics( name: "lodestar_eth1_logs_batch_size_dynamic", help: "Dynamic batch size to fetch deposit logs", }), - - // Merge Search info - eth1MergeStatus: register.gauge({ - name: "lodestar_eth1_merge_status", - help: "Eth1 Merge Status 0 PRE_MERGE 1 SEARCHING 2 FOUND 3 POST_MERGE", - }), - eth1MergeTDFactor: register.gauge({ - name: "lodestar_eth1_merge_td_factor", - help: "TTD set for the merge", - }), - eth1MergeTTD: register.gauge({ - name: "lodestar_eth1_merge_ttd", - help: "TTD set for the merge scaled down by td_factor", - }), - - eth1PollMergeBlockErrors: register.gauge({ - name: "lodestar_eth1_poll_merge_block_errors_total", - help: "Total count of errors polling merge block", - }), - getTerminalPowBlockPromiseCacheHit: register.gauge({ - name: "lodestar_eth1_get_terminal_pow_block_promise_cache_hit_total", - help: "Total count of skipped runs in poll merge block, because a previous promise existed", - }), - eth1ParentBlocksFetched: register.gauge({ - name: "lodestar_eth1_parent_blocks_fetched_total", - help: "Total count of parent blocks fetched searching for merge block", - }), - - // Latest block details - eth1LatestBlockTD: register.gauge({ - name: "lodestar_eth1_latest_block_ttd", - help: "Eth1 latest Block td scaled down by td_factor", - }), - eth1LatestBlockNumber: register.gauge({ - name: "lodestar_eth1_latest_block_number", - help: "Eth1 latest block number", - }), - eth1LatestBlockTimestamp: register.gauge({ - name: "lodestar_eth1_latest_block_timestamp", - help: "Eth1 latest block timestamp", - }), - - // Merge details - eth1MergeBlockDetails: register.gauge<{ - terminalBlockHash: string; - terminalBlockNumber: string; - terminalBlockTD: string; - }>({ - name: "lodestar_eth1_merge_block_details", - help: "If found then 1 with terminal block details", - labelNames: ["terminalBlockHash", "terminalBlockNumber", "terminalBlockTD"], - }), }, eth1HttpClient: { diff --git a/packages/beacon-node/src/node/notifier.ts b/packages/beacon-node/src/node/notifier.ts index 36ed384656..aabb1850a0 100644 --- a/packages/beacon-node/src/node/notifier.ts +++ b/packages/beacon-node/src/node/notifier.ts @@ -6,7 +6,6 @@ import { computeEpochAtSlot, computeStartSlotAtEpoch, isExecutionCachedStateType, - isMergeTransitionComplete, } from "@lodestar/state-transition"; import {Epoch} from "@lodestar/types"; import {ErrorAborted, Logger, prettyBytes, prettyBytesShort, sleep} from "@lodestar/utils"; @@ -36,7 +35,6 @@ export async function runNodeNotifier(modules: NodeNotifierModules): Promise { - const logger = testLogger(); - - function getConfig(ttd: bigint): ChainConfig { - return { - // Set time units to 1s to make the test faster - SECONDS_PER_ETH1_BLOCK: 1, - SLOT_DURATION_MS: 1000, - DEPOSIT_CONTRACT_ADDRESS: Buffer.alloc(32, 0), - 
TERMINAL_TOTAL_DIFFICULTY: ttd, - TERMINAL_BLOCK_HASH: ZERO_HASH, - } as Partial as ChainConfig; - } - const eth1Config = {DEPOSIT_CONTRACT_ADDRESS: ZERO_HASH}; - - // Compute lazily since getGoerliRpcUrl() throws if GOERLI_RPC_URL is not set - let eth1Options: Eth1Options; - beforeAll(() => { - eth1Options = { - enabled: true, - providerUrls: [getGoerliRpcUrl()], - depositContractDeployBlock: 0, - unsafeAllowDepositDataOverwrite: false, - }; - }); - - let controller: AbortController; - beforeEach(() => { - controller = new AbortController(); - }); - afterEach(() => controller.abort()); - - it("Should find terminal pow block through TERMINAL_BLOCK_HASH", async () => { - const eth1Provider = new Eth1Provider(eth1Config, eth1Options, controller.signal); - const latestBlock = await eth1Provider.getBlockByNumber("latest"); - if (!latestBlock) throw Error("No latestBlock"); - const terminalTotalDifficulty = quantityToBigint(latestBlock.totalDifficulty) - BigInt(1000); - const config = getConfig(terminalTotalDifficulty); - config.TERMINAL_BLOCK_HASH = fromHexString(latestBlock.hash); - const eth1MergeBlockTracker = new Eth1MergeBlockTracker( - { - config, - logger, - signal: controller.signal, - metrics: null, - }, - eth1Provider as IEth1Provider - ); - - // Wait for Eth1MergeBlockTracker to find at least one merge block - while (!controller.signal.aborted) { - if (await eth1MergeBlockTracker.getTerminalPowBlock()) break; - await sleep(500, controller.signal); - } - - // Status should acknowlege merge block is found - expect(eth1MergeBlockTracker["status"]).toBe(StatusCode.FOUND); - - // Given the total difficulty offset the block that has TTD is the `difficultyOffset`nth block - const mergeBlock = await eth1MergeBlockTracker.getTerminalPowBlock(); - if (!mergeBlock) throw Error("terminal pow block not found"); - expect(mergeBlock.totalDifficulty).toBe(quantityToBigint(latestBlock.totalDifficulty)); - }); - - it("Should find merge block polling future 'latest' blocks", async () => { - const eth1Provider = new Eth1Provider(eth1Config, eth1Options, controller.signal); - const latestBlock = await eth1Provider.getBlockByNumber("latest"); - if (!latestBlock) throw Error("No latestBlock"); - - // Set TTD to current totalDifficulty + 1, so the next block is the merge block - const terminalTotalDifficulty = quantityToBigint(latestBlock.totalDifficulty) + BigInt(1); - - const eth1MergeBlockTracker = new Eth1MergeBlockTracker( - { - config: getConfig(terminalTotalDifficulty), - logger, - signal: controller.signal, - metrics: null, - }, - eth1Provider as IEth1Provider - ); - - // Wait for Eth1MergeBlockTracker to find at least one merge block - while (!controller.signal.aborted) { - if (await eth1MergeBlockTracker.getTerminalPowBlock()) break; - await sleep(500, controller.signal); - } - - // Status should acknowlege merge block is found - expect(eth1MergeBlockTracker["status"]).toBe(StatusCode.FOUND); - - // Given the total difficulty offset the block that has TTD is the `difficultyOffset`nth block - const mergeBlock = await eth1MergeBlockTracker.getTerminalPowBlock(); - if (!mergeBlock) throw Error("mergeBlock not found"); - // "mergeBlock.totalDifficulty is not >= TTD" - expect(mergeBlock.totalDifficulty).toBeGreaterThanOrEqual(terminalTotalDifficulty); - }); - - it("Should find merge block fetching past blocks", async () => { - const eth1Provider = new Eth1Provider(eth1Config, eth1Options, controller.signal); - const latestBlock = await eth1Provider.getBlockByNumber("latest"); - if (!latestBlock) throw 
Error("No latestBlock"); - - // Set TTD to current totalDifficulty + 1, so the previous block is the merge block - const terminalTotalDifficulty = quantityToBigint(latestBlock.totalDifficulty) - BigInt(1); - - const eth1MergeBlockTracker = new Eth1MergeBlockTracker( - { - config: getConfig(terminalTotalDifficulty), - logger, - signal: controller.signal, - metrics: null, - }, - eth1Provider as IEth1Provider - ); - - // Wait for Eth1MergeBlockTracker to find at least one merge block - while (!controller.signal.aborted) { - if (await eth1MergeBlockTracker.getTerminalPowBlock()) break; - await sleep(500, controller.signal); - } - - // Status should acknowlege merge block is found - expect(eth1MergeBlockTracker["status"]).toBe(StatusCode.FOUND); - - // Given the total difficulty offset the block that has TTD is the `difficultyOffset`nth block - const mergeBlock = await eth1MergeBlockTracker.getTerminalPowBlock(); - if (!mergeBlock) throw Error("mergeBlock not found"); - // "mergeBlock.totalDifficulty is not >= TTD" - expect(mergeBlock.totalDifficulty).toBeGreaterThanOrEqual(terminalTotalDifficulty); - }); -}); diff --git a/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts b/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts index 14dd293e15..4d52b4a64d 100644 --- a/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts +++ b/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts @@ -3,7 +3,6 @@ import {afterAll, beforeAll, bench, describe} from "@chainsafe/benchmark"; import {fromHexString} from "@chainsafe/ssz"; import {config} from "@lodestar/config/default"; import {LevelDbController} from "@lodestar/db/controller/level"; -import {SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY} from "@lodestar/params"; import {CachedBeaconStateAltair} from "@lodestar/state-transition"; import {defaultOptions as defaultValidatorOptions} from "@lodestar/validator"; import {generatePerfTestCachedStateAltair} from "../../../../../state-transition/test/perf/util.js"; @@ -31,7 +30,6 @@ describe("produceBlockBody", () => { proposerBoost: true, proposerBoostReorg: true, computeUnrealized: false, - safeSlotsToImportOptimistically: SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY, disableArchiveOnCheckpoint: true, suggestedFeeRecipient: defaultValidatorOptions.suggestedFeeRecipient, skipCreateStateCacheIfAvailable: true, diff --git a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts index 7d9b21ff94..e7cd6a1b2c 100644 --- a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts +++ b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts @@ -2,7 +2,7 @@ import {generateKeyPair} from "@libp2p/crypto/keys"; import {afterAll, beforeAll, bench, describe, setBenchOpts} from "@chainsafe/benchmark"; import {config} from "@lodestar/config/default"; import {LevelDbController} from "@lodestar/db/controller/level"; -import {SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY, SLOTS_PER_EPOCH} from "@lodestar/params"; +import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {sleep, toHex} from "@lodestar/utils"; import {defaultOptions as defaultValidatorOptions} from "@lodestar/validator"; import {rangeSyncTest} from "../../../../state-transition/test/perf/params.js"; @@ -82,7 +82,6 @@ describe.skip("verify+import blocks - range sync perf test", () => { proposerBoost: true, proposerBoostReorg: true, computeUnrealized: false, - safeSlotsToImportOptimistically: 
SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY, disableArchiveOnCheckpoint: true, suggestedFeeRecipient: defaultValidatorOptions.suggestedFeeRecipient, skipCreateStateCacheIfAvailable: true, diff --git a/packages/beacon-node/test/scripts/el-interop/besu/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/besu.sh similarity index 88% rename from packages/beacon-node/test/scripts/el-interop/besu/post-merge.sh rename to packages/beacon-node/test/scripts/el-interop/besu.sh index d864814ece..1f54d86467 100755 --- a/packages/beacon-node/test/scripts/el-interop/besu/post-merge.sh +++ b/packages/beacon-node/test/scripts/el-interop/besu.sh @@ -2,7 +2,8 @@ scriptDir=$(dirname $0) currentDir=$(pwd) +elDir=$scriptDir/besu -. $scriptDir/common-setup.sh +. $elDir/common-setup.sh $EL_BINARY_DIR/besu --engine-rpc-enabled --rpc-http-enabled --rpc-http-api ADMIN,ETH,MINER,NET --rpc-http-port $ETH_PORT --engine-rpc-port $ENGINE_PORT --engine-jwt-secret $currentDir/$DATA_DIR/jwtsecret --data-path $DATA_DIR --data-storage-format BONSAI --genesis-file $DATA_DIR/genesis.json diff --git a/packages/beacon-node/test/scripts/el-interop/besu/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/besu/common-setup.sh index f211f5d071..46e6cc6fda 100755 --- a/packages/beacon-node/test/scripts/el-interop/besu/common-setup.sh +++ b/packages/beacon-node/test/scripts/el-interop/besu/common-setup.sh @@ -6,11 +6,11 @@ echo $EL_BINARY_DIR echo $JWT_SECRET_HEX echo $TEMPLATE_FILE -echo $scriptDir +echo $elDir echo $currentDir -env TTD=$TTD envsubst < $scriptDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json +env TTD=$TTD envsubst < $elDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json echo "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" > $DATA_DIR/sk.json echo "12345678" > $DATA_DIR/password.txt pubKey="0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" diff --git a/packages/beacon-node/test/scripts/el-interop/besudocker/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/besudocker.sh similarity index 90% rename from packages/beacon-node/test/scripts/el-interop/besudocker/post-merge.sh rename to packages/beacon-node/test/scripts/el-interop/besudocker.sh index d26307ee3d..30117834d4 100755 --- a/packages/beacon-node/test/scripts/el-interop/besudocker/post-merge.sh +++ b/packages/beacon-node/test/scripts/el-interop/besudocker.sh @@ -2,7 +2,8 @@ scriptDir=$(dirname $0) currentDir=$(pwd) +elDir=$scriptDir/besudocker -. $scriptDir/common-setup.sh +. 
$elDir/common-setup.sh docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution -p $ETH_PORT:$ETH_PORT -p $ENGINE_PORT:$ENGINE_PORT -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --engine-rpc-enabled --rpc-http-enabled --rpc-http-api ADMIN,ETH,MINER,NET --rpc-http-port $ETH_PORT --engine-rpc-port $ENGINE_PORT --engine-jwt-secret /data/jwtsecret --data-path /data/besu --data-storage-format BONSAI --genesis-file /data/genesis.json diff --git a/packages/beacon-node/test/scripts/el-interop/besudocker/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/besudocker/common-setup.sh index b3d93190ef..cde122a656 100644 --- a/packages/beacon-node/test/scripts/el-interop/besudocker/common-setup.sh +++ b/packages/beacon-node/test/scripts/el-interop/besudocker/common-setup.sh @@ -6,11 +6,11 @@ echo $EL_BINARY_DIR echo $JWT_SECRET_HEX echo $TEMPLATE_FILE -echo $scriptDir +echo $elDir echo $currentDir -env TTD=$TTD envsubst < $scriptDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json +env TTD=$TTD envsubst < $elDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json echo "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" > $DATA_DIR/sk.json echo "12345678" > $DATA_DIR/password.txt pubKey="0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" diff --git a/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker.sh similarity index 86% rename from packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh rename to packages/beacon-node/test/scripts/el-interop/ethereumjsdocker.sh index fbf9dcaaf9..42293061e1 100755 --- a/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh +++ b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker.sh @@ -2,7 +2,8 @@ scriptDir=$(dirname $0) currentDir=$(pwd) +elDir=$scriptDir/ethereumjsdocker -. $scriptDir/common-setup.sh +. 
$elDir/common-setup.sh docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution --network host -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --dataDir /data/ethereumjs --gethGenesis /data/genesis.json --rpc --rpcEngineAddr 0.0.0.0 --rpcAddr 0.0.0.0 --rpcEngine --jwt-secret /data/jwtsecret --logLevel debug --isSingleNode diff --git a/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/common-setup.sh index f5e64eadf2..46741823b8 100755 --- a/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/common-setup.sh +++ b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/common-setup.sh @@ -6,11 +6,11 @@ echo $EL_BINARY_DIR echo $JWT_SECRET_HEX echo $TEMPLATE_FILE -echo $scriptDir +echo $elDir echo $currentDir -env TTD=$TTD envsubst < $scriptDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json +env TTD=$TTD envsubst < $elDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json echo "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" > $DATA_DIR/sk.json echo "12345678" > $DATA_DIR/password.txt pubKey="0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" diff --git a/packages/beacon-node/test/scripts/el-interop/geth/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/geth.sh similarity index 87% rename from packages/beacon-node/test/scripts/el-interop/geth/post-merge.sh rename to packages/beacon-node/test/scripts/el-interop/geth.sh index 7f2ce0c96f..b9e6923df4 100755 --- a/packages/beacon-node/test/scripts/el-interop/geth/post-merge.sh +++ b/packages/beacon-node/test/scripts/el-interop/geth.sh @@ -2,7 +2,8 @@ scriptDir=$(dirname $0) currentDir=$(pwd) +elDir=$scriptDir/geth -. $scriptDir/common-setup.sh +. $elDir/common-setup.sh $EL_BINARY_DIR/geth --http -http.api "engine,net,eth,miner" --http.port $ETH_PORT --authrpc.port $ENGINE_PORT --authrpc.jwtsecret $currentDir/$DATA_DIR/jwtsecret --datadir $DATA_DIR --allow-insecure-unlock --unlock $pubKey --password $DATA_DIR/password.txt --syncmode full \ No newline at end of file diff --git a/packages/beacon-node/test/scripts/el-interop/geth/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/geth/common-setup.sh index b91a1a49ba..aac1ec4c20 100755 --- a/packages/beacon-node/test/scripts/el-interop/geth/common-setup.sh +++ b/packages/beacon-node/test/scripts/el-interop/geth/common-setup.sh @@ -5,11 +5,11 @@ echo $DATA_DIR echo $EL_BINARY_DIR echo $JWT_SECRET_HEX -echo $scriptDir +echo $elDir echo $currentDir -env TTD=$TTD envsubst < $scriptDir/genesisPre.tmpl > $DATA_DIR/genesis.json +env TTD=$TTD envsubst < $elDir/genesisPre.tmpl > $DATA_DIR/genesis.json echo "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" > $DATA_DIR/sk.json echo "12345678" > $DATA_DIR/password.txt pubKey="0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" diff --git a/packages/beacon-node/test/scripts/el-interop/geth/pre-merge.sh b/packages/beacon-node/test/scripts/el-interop/geth/pre-merge.sh deleted file mode 100755 index fccf432e14..0000000000 --- a/packages/beacon-node/test/scripts/el-interop/geth/pre-merge.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -x - -scriptDir=$(dirname $0) -currentDir=$(pwd) - -. 
$scriptDir/common-setup.sh - -$EL_BINARY_DIR/geth --http -http.api "engine,net,eth,miner" --http.port $ETH_PORT --authrpc.port $ENGINE_PORT --authrpc.jwtsecret $currentDir/$DATA_DIR/jwtsecret --datadir $DATA_DIR --allow-insecure-unlock --unlock $pubKey --password $DATA_DIR/password.txt --nodiscover --mine --syncmode full diff --git a/packages/beacon-node/test/scripts/el-interop/gethdocker/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/gethdocker.sh similarity index 89% rename from packages/beacon-node/test/scripts/el-interop/gethdocker/post-merge.sh rename to packages/beacon-node/test/scripts/el-interop/gethdocker.sh index 177d0dcb7e..a7f8fcb208 100755 --- a/packages/beacon-node/test/scripts/el-interop/gethdocker/post-merge.sh +++ b/packages/beacon-node/test/scripts/el-interop/gethdocker.sh @@ -2,7 +2,8 @@ scriptDir=$(dirname $0) currentDir=$(pwd) +elDir=$scriptDir/gethdocker -. $scriptDir/common-setup.sh +. $elDir/common-setup.sh docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution --network host -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --http -http.api "engine,net,eth,miner" --http.port $ETH_PORT --authrpc.port $ENGINE_PORT --authrpc.jwtsecret /data/jwtsecret --allow-insecure-unlock --unlock $pubKey --password /data/password.txt --datadir /data/geth --syncmode full diff --git a/packages/beacon-node/test/scripts/el-interop/gethdocker/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/gethdocker/common-setup.sh index 4e77c3c108..0183c47064 100755 --- a/packages/beacon-node/test/scripts/el-interop/gethdocker/common-setup.sh +++ b/packages/beacon-node/test/scripts/el-interop/gethdocker/common-setup.sh @@ -6,11 +6,11 @@ echo $EL_BINARY_DIR echo $JWT_SECRET_HEX echo $TEMPLATE_FILE -echo $scriptDir +echo $elDir echo $currentDir -env TTD=$TTD envsubst < $scriptDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json +env TTD=$TTD envsubst < $elDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json echo "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" > $DATA_DIR/sk.json echo "12345678" > $DATA_DIR/password.txt pubKey="0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" diff --git a/packages/beacon-node/test/scripts/el-interop/gethdocker/pre-merge.sh b/packages/beacon-node/test/scripts/el-interop/gethdocker/pre-merge.sh deleted file mode 100755 index a760fec65b..0000000000 --- a/packages/beacon-node/test/scripts/el-interop/gethdocker/pre-merge.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -x - -scriptDir=$(dirname $0) -currentDir=$(pwd) - -. 
$scriptDir/common-setup.sh - -# EL_BINARY_DIR refers to the local docker image build from kiln/gethdocker folder -docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution --network host -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --http -http.api "engine,net,eth,miner" --http.port $ETH_PORT --authrpc.port $ENGINE_PORT --authrpc.jwtsecret /data/jwtsecret --allow-insecure-unlock --unlock $pubKey --password /data/password.txt --datadir /data/geth --nodiscover --mine --syncmode full diff --git a/packages/beacon-node/test/scripts/el-interop/mergemock/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/mergemock/common-setup.sh deleted file mode 100755 index aea73c4c7a..0000000000 --- a/packages/beacon-node/test/scripts/el-interop/mergemock/common-setup.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -x - -echo $TTD -echo $DATA_DIR -echo $EL_BINARY_DIR -echo $JWT_SECRET_HEX - -echo $scriptDir -echo $currentDir - - -env TTD=$TTD envsubst < $scriptDir/genesisPre.tmpl > $DATA_DIR/genesis.json -echo "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" > $DATA_DIR/sk.json -echo "12345678" > $DATA_DIR/password.txt -pubKey="0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" - -# echo a hex encoded 256 bit secret into a file, however remove leading 0x as mergemock doesnt like it -echo $JWT_SECRET_HEX | sed 's/0x//' > $DATA_DIR/jwtsecret -docker rm -f custom-execution diff --git a/packages/beacon-node/test/scripts/el-interop/mergemock/genesisPre.tmpl b/packages/beacon-node/test/scripts/el-interop/mergemock/genesisPre.tmpl deleted file mode 100644 index e3e1fa4141..0000000000 --- a/packages/beacon-node/test/scripts/el-interop/mergemock/genesisPre.tmpl +++ /dev/null @@ -1,36 +0,0 @@ -{ - "config": { - "chainId": 1, - "homesteadBlock": 0, - "eip150Block": 0, - "eip150Hash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "eip155Block": 0, - "eip158Block": 0, - "byzantiumBlock": 0, - "constantinopleBlock": 0, - "petersburgBlock": 0, - "istanbulBlock": 0, - "muirGlacierBlock": 0, - "berlinBlock": 0, - "londonBlock": 0, - "clique": { - "period": 5, - "epoch": 30000 - }, - "terminalTotalDifficulty": ${TTD} - }, - "nonce": "0x42", - "timestamp": "0x0", - "extraData": "0x0000000000000000000000000000000000000000000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "gasLimit": "0x1c9c380", - "difficulty": "0x0", - "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "coinbase": "0x0000000000000000000000000000000000000000", - "alloc": { - "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": {"balance": "0x6d6172697573766477000000"} - }, - "number": "0x0", - "gasUsed": "0x0", - "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "baseFeePerGas": "0x7" -} diff --git a/packages/beacon-node/test/scripts/el-interop/mergemock/jwt.hex b/packages/beacon-node/test/scripts/el-interop/mergemock/jwt.hex deleted file mode 100644 index ed20b10a08..0000000000 --- a/packages/beacon-node/test/scripts/el-interop/mergemock/jwt.hex +++ /dev/null @@ -1 +0,0 @@ -0xdc6457099f127cf0bac78de8b297df04951281909db4f58b43def7c7151e765d diff --git a/packages/beacon-node/test/scripts/el-interop/mergemock/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/mergemock/post-merge.sh deleted file mode 100755 index 902b868cc8..0000000000 --- 
a/packages/beacon-node/test/scripts/el-interop/mergemock/post-merge.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -x - -scriptDir=$(dirname $0) -currentDir=$(pwd) - -. $scriptDir/common-setup.sh - -# if we don't provide any datadir merge mock stores data in memory which is fine by us -docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution --network host -v $currentDir/$DATA_DIR/genesis.json:/usr/app/genesis.json -v $currentDir/$DATA_DIR/jwtsecret:/usr/app/jwt.hex $EL_BINARY_DIR relay --listen-addr 127.0.0.1:$ETH_PORT --engine-listen-addr 127.0.0.1:$ENGINE_PORT --log.level debug --genesis-validators-root 0x3e8bd71d9925794b4f5e8623e15094ea6edc0fd206e3551e13dd2d10e08fbaba diff --git a/packages/beacon-node/test/scripts/el-interop/nethermind/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/nethermind.sh similarity index 89% rename from packages/beacon-node/test/scripts/el-interop/nethermind/post-merge.sh rename to packages/beacon-node/test/scripts/el-interop/nethermind.sh index d379ff4032..6c0323ed81 100755 --- a/packages/beacon-node/test/scripts/el-interop/nethermind/post-merge.sh +++ b/packages/beacon-node/test/scripts/el-interop/nethermind.sh @@ -2,8 +2,9 @@ scriptDir=$(dirname $0) currentDir=$(pwd) +elDir=$scriptDir/nethermind -. $scriptDir/common-setup.sh +. $elDir/common-setup.sh cd $EL_BINARY_DIR dotnet run -c Release -- --config themerge_kiln_testvectors --Merge.TerminalTotalDifficulty $TTD --JsonRpc.JwtSecretFile $currentDir/$DATA_DIR/jwtsecret --JsonRpc.Enabled true --JsonRpc.Host 0.0.0.0 --JsonRpc.AdditionalRpcUrls "http://localhost:$ETH_PORT|http|net;eth;subscribe;engine;web3;client|no-auth,http://localhost:$ENGINE_PORT|http|eth;engine" --Sync.SnapSync false diff --git a/packages/beacon-node/test/scripts/el-interop/nethermind/pre-merge.sh b/packages/beacon-node/test/scripts/el-interop/nethermind/pre-merge.sh deleted file mode 100755 index 72cf36efa6..0000000000 --- a/packages/beacon-node/test/scripts/el-interop/nethermind/pre-merge.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -x - -scriptDir=$(dirname $0) -currentDir=$(pwd) - -. $scriptDir/common-setup.sh - -cd $EL_BINARY_DIR -dotnet run -c Release -- --config themerge_kiln_m2 --Merge.TerminalTotalDifficulty $TTD --JsonRpc.JwtSecretFile $currentDir/$DATA_DIR/jwtsecret --Merge.Enabled true --Init.DiagnosticMode=None --JsonRpc.Enabled true --JsonRpc.Host 0.0.0.0 --JsonRpc.AdditionalRpcUrls "http://localhost:$ETH_PORT|http|net;eth;subscribe;engine;web3;client|no-auth,http://localhost:$ENGINE_PORT|http|eth;engine" --Sync.SnapSync false diff --git a/packages/beacon-node/test/scripts/el-interop/netherminddocker/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/netherminddocker.sh similarity index 91% rename from packages/beacon-node/test/scripts/el-interop/netherminddocker/post-merge.sh rename to packages/beacon-node/test/scripts/el-interop/netherminddocker.sh index 2a41fe8979..fa3a2b5ab1 100755 --- a/packages/beacon-node/test/scripts/el-interop/netherminddocker/post-merge.sh +++ b/packages/beacon-node/test/scripts/el-interop/netherminddocker.sh @@ -2,8 +2,9 @@ scriptDir=$(dirname $0) currentDir=$(pwd) +elDir=$scriptDir/netherminddocker -. $scriptDir/common-setup.sh +. 
$elDir/common-setup.sh if [ "$TEMPLATE_FILE" == "genesisPostWithdraw.tmpl" ] then diff --git a/packages/beacon-node/test/scripts/el-interop/netherminddocker/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/netherminddocker/common-setup.sh index 8892332be4..bde72a2c05 100755 --- a/packages/beacon-node/test/scripts/el-interop/netherminddocker/common-setup.sh +++ b/packages/beacon-node/test/scripts/el-interop/netherminddocker/common-setup.sh @@ -5,7 +5,7 @@ echo $DATA_DIR echo $EL_BINARY_DIR echo $JWT_SECRET_HEX -echo $scriptDir +echo $elDir echo $currentDir # echo a hex encoded 256 bit secret into a file diff --git a/packages/beacon-node/test/scripts/el-interop/netherminddocker/pre-merge.sh b/packages/beacon-node/test/scripts/el-interop/netherminddocker/pre-merge.sh deleted file mode 100755 index 468e009335..0000000000 --- a/packages/beacon-node/test/scripts/el-interop/netherminddocker/pre-merge.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -scriptDir=$(dirname $0) -currentDir=$(pwd) - -. $scriptDir/common-setup.sh - -echo "sleeping for 10 seconds..." - -docker run --rm --network host --name custom-execution -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --datadir /data/nethermind --config themerge_kiln_m2 --Merge.TerminalTotalDifficulty $TTD --JsonRpc.JwtSecretFile /data/jwtsecret --Merge.Enabled true --Init.DiagnosticMode=None --JsonRpc.Enabled true --JsonRpc.Host 0.0.0.0 --JsonRpc.AdditionalRpcUrls "http://localhost:$ETH_PORT|http|net;eth;subscribe;engine;web3;client|no-auth,http://localhost:$ENGINE_PORT|http|eth;engine" --Sync.SnapSync false diff --git a/packages/beacon-node/test/sim/electra-interop.test.ts b/packages/beacon-node/test/sim/electra-interop.test.ts index d4dfa5b5d7..6166df8cd2 100644 --- a/packages/beacon-node/test/sim/electra-interop.test.ts +++ b/packages/beacon-node/test/sim/electra-interop.test.ts @@ -20,7 +20,7 @@ import {TestLoggerOpts, testLogger} from "../utils/logger.js"; import {getDevBeaconNode} from "../utils/node/beacon.js"; import {simTestInfoTracker} from "../utils/node/simTest.js"; import {getAndInitDevValidators} from "../utils/node/validator.js"; -import {ELClient, ELStartMode, runEL, sendRawTransactionBig} from "../utils/runEl.js"; +import {ELClient, runEL, sendRawTransactionBig} from "../utils/runEl.js"; import {logFilesDir} from "./params.js"; import {shell} from "./shell.js"; @@ -67,7 +67,7 @@ describe("executionEngine / ExecutionEngineHttp", () => { it("Send and get payloads with depositRequests to/from EL", async () => { const {elClient, tearDownCallBack} = await runEL( - {...elSetupConfig, mode: ELStartMode.PostMerge, genesisTemplate: "electra.tmpl"}, + {...elSetupConfig, genesisTemplate: "electra.tmpl"}, {...elRunOptions, ttd: BigInt(0)}, controller.signal ); @@ -232,7 +232,7 @@ describe("executionEngine / ExecutionEngineHttp", () => { it.skip("Post-merge, run for a few blocks", async () => { console.log("\n\nPost-merge, run for a few blocks\n\n"); const {elClient, tearDownCallBack} = await runEL( - {...elSetupConfig, mode: ELStartMode.PostMerge, genesisTemplate: "electra.tmpl"}, + {...elSetupConfig, genesisTemplate: "electra.tmpl"}, {...elRunOptions, ttd: BigInt(0)}, controller.signal ); @@ -259,7 +259,7 @@ describe("executionEngine / ExecutionEngineHttp", () => { electraEpoch: Epoch; testName: string; }): Promise { - const {genesisBlockHash, ttd, engineRpcUrl, ethRpcUrl} = elClient; + const {genesisBlockHash, engineRpcUrl, ethRpcUrl} = elClient; const validatorClientCount = 1; const validatorsPerClient = 32; @@ -306,7 +306,6 @@ 
describe("executionEngine / ExecutionEngineHttp", () => { CAPELLA_FORK_EPOCH: 0, DENEB_FORK_EPOCH: 0, ELECTRA_FORK_EPOCH: electraEpoch, - TERMINAL_TOTAL_DIFFICULTY: ttd, }, options: { api: {rest: {enabled: true} as BeaconRestApiServerOpts}, diff --git a/packages/beacon-node/test/sim/mergemock.test.ts b/packages/beacon-node/test/sim/mergemock.test.ts deleted file mode 100644 index 1fe15518f7..0000000000 --- a/packages/beacon-node/test/sim/mergemock.test.ts +++ /dev/null @@ -1,273 +0,0 @@ -import fs from "node:fs"; -import {afterAll, afterEach, describe, it, vi} from "vitest"; -import {fromHexString, toHexString} from "@chainsafe/ssz"; -import {routes} from "@lodestar/api"; -import {ChainConfig} from "@lodestar/config"; -import {TimestampFormatCode} from "@lodestar/logger"; -import {SLOTS_PER_EPOCH} from "@lodestar/params"; -import {Epoch, SignedBeaconBlock, bellatrix} from "@lodestar/types"; -import {LogLevel, sleep} from "@lodestar/utils"; -import {ValidatorProposerConfig} from "@lodestar/validator"; -import {BeaconRestApiServerOpts} from "../../src/api/index.js"; -import {ZERO_HASH} from "../../src/constants/index.js"; -import {BuilderStatus} from "../../src/execution/builder/http.js"; -import {Eth1Provider} from "../../src/index.js"; -import {ClockEvent} from "../../src/util/clock.js"; -import {TestLoggerOpts, testLogger} from "../utils/logger.js"; -import {getDevBeaconNode} from "../utils/node/beacon.js"; -import {simTestInfoTracker} from "../utils/node/simTest.js"; -import {getAndInitDevValidators} from "../utils/node/validator.js"; -import {ELClient, ELStartMode, runEL} from "../utils/runEl.js"; -import {logFilesDir} from "./params.js"; -import {shell} from "./shell.js"; - -// NOTE: How to run -// EL_BINARY_DIR=g11tech/mergemock:latest EL_SCRIPT_DIR=mergemock LODESTAR_PRESET=mainnet ETH_PORT=8661 ENGINE_PORT=8551 yarn vitest run test/sim/mergemock.test.ts -// ``` - -const jwtSecretHex = "0xdc6457099f127cf0bac78de8b297df04951281909db4f58b43def7c7151e765d"; - -describe("executionEngine / ExecutionEngineHttp", () => { - if (!process.env.EL_BINARY_DIR || !process.env.EL_SCRIPT_DIR) { - throw Error( - `EL ENV must be provided, EL_BINARY_DIR: ${process.env.EL_BINARY_DIR}, EL_SCRIPT_DIR: ${process.env.EL_SCRIPT_DIR}` - ); - } - vi.setConfig({testTimeout: 10 * 60 * 1000}); - - const dataPath = fs.mkdtempSync("lodestar-test-mergemock"); - const elSetupConfig = { - elScriptDir: process.env.EL_SCRIPT_DIR, - elBinaryDir: process.env.EL_BINARY_DIR, - }; - const elRunOptions = { - dataPath, - jwtSecretHex, - enginePort: parseInt(process.env.ENGINE_PORT ?? "8551"), - ethPort: parseInt(process.env.ETH_PORT ?? 
"8545"), - }; - - const controller = new AbortController(); - afterAll(async () => { - controller?.abort(); - await shell(`sudo rm -rf ${dataPath}`); - }); - - const afterEachCallbacks: (() => Promise | void)[] = []; - afterEach(async () => { - while (afterEachCallbacks.length > 0) { - const callback = afterEachCallbacks.pop(); - if (callback) await callback(); - } - }); - - it("Test builder flow", async () => { - console.log("\n\nPost-merge, run for a few blocks\n\n"); - const {elClient, tearDownCallBack} = await runEL( - {...elSetupConfig, mode: ELStartMode.PostMerge}, - {...elRunOptions, ttd: BigInt(0)}, - controller.signal - ); - afterEachCallbacks.push(() => tearDownCallBack()); - - await runNodeWithEL({ - elClient, - bellatrixEpoch: 0, - testName: "post-merge", - }); - }); - - type RunOpts = {elClient: ELClient; bellatrixEpoch: Epoch; testName: string}; - - async function runNodeWithEL({elClient, bellatrixEpoch, testName}: RunOpts): Promise { - const {genesisBlockHash, ttd, engineRpcUrl, ethRpcUrl} = elClient; - const validatorClientCount = 1; - const validatorsPerClient = 32; - - const testParams: Pick = { - SLOT_DURATION_MS: 2000, - }; - - // Should reach justification in 6 epochs max. - // Merge block happens at epoch 2 slot 4. Then 4 epochs to finalize - const expectedEpochsToFinish = 1; - // 1 epoch of margin of error - const epochsOfMargin = 1; - const timeoutSetupMargin = 30 * 1000; // Give extra 30 seconds of margin - - // We only expect builder blocks since `builderalways` is configured - // In a perfect run expected builder = 32, expected engine = 0 - // keeping 4 missed slots and 4 engine blocks due to fallback as margin - const expectedBuilderBlocks = 28; - const maximumEngineBlocks = 4; - - // All assertions are tracked w.r.t. fee recipient by attaching different fee recipient to - // execution and builder - const feeRecipientLocal = "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; - const feeRecipientEngine = "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"; - const feeRecipientMevBoost = "0xcccccccccccccccccccccccccccccccccccccccc"; - - // delay a bit so regular sync sees it's up to date and sync is completed from the beginning - const genesisSlotsDelay = 8; - - const timeout = - ((epochsOfMargin + expectedEpochsToFinish) * SLOTS_PER_EPOCH + genesisSlotsDelay) * testParams.SLOT_DURATION_MS; - - vi.setConfig({testTimeout: timeout + 2 * timeoutSetupMargin}); - - const genesisTime = Math.floor(Date.now() / 1000) + genesisSlotsDelay * (testParams.SLOT_DURATION_MS / 1000); - - const testLoggerOpts: TestLoggerOpts = { - level: LogLevel.info, - file: { - filepath: `${logFilesDir}/mergemock-${testName}.log`, - level: LogLevel.debug, - }, - timestampFormat: { - format: TimestampFormatCode.EpochSlot, - genesisTime, - slotsPerEpoch: SLOTS_PER_EPOCH, - secondsPerSlot: testParams.SLOT_DURATION_MS / 1000, - }, - }; - const loggerNodeA = testLogger("Node-A", testLoggerOpts); - - const bn = await getDevBeaconNode({ - params: { - ...testParams, - ALTAIR_FORK_EPOCH: 0, - BELLATRIX_FORK_EPOCH: bellatrixEpoch, - TERMINAL_TOTAL_DIFFICULTY: ttd, - }, - options: { - api: {rest: {enabled: true} as BeaconRestApiServerOpts}, - sync: {isSingleNode: true}, - network: {allowPublishToZeroPeers: true, discv5: null}, - // Now eth deposit/merge tracker methods directly available on engine endpoints - eth1: {enabled: false, providerUrls: [engineRpcUrl], jwtSecretHex}, - executionEngine: {urls: [engineRpcUrl], jwtSecretHex}, - executionBuilder: { - url: ethRpcUrl, - enabled: true, - 
issueLocalFcUWithFeeRecipient: feeRecipientMevBoost, - allowedFaults: 8, - faultInspectionWindow: 32, - }, - chain: {suggestedFeeRecipient: feeRecipientLocal}, - }, - validatorCount: validatorClientCount * validatorsPerClient, - logger: loggerNodeA, - genesisTime, - eth1BlockHash: fromHexString(genesisBlockHash), - }); - if (!bn.chain.executionBuilder) { - throw Error("executionBuilder should have been initialized"); - } - // Enable builder by default, else because of circuit breaker we always start it with disabled - bn.chain.executionBuilder.updateStatus(BuilderStatus.enabled); - - afterEachCallbacks.push(async () => { - await bn.close(); - await sleep(1000); - }); - - const stopInfoTracker = simTestInfoTracker(bn, loggerNodeA); - const valProposerConfig = { - defaultConfig: { - graffiti: "default graffiti", - strictFeeRecipientCheck: true, - feeRecipient: feeRecipientEngine, - builder: { - gasLimit: 60000000, - selection: routes.validator.BuilderSelection.BuilderAlways, - }, - }, - } as ValidatorProposerConfig; - - const {validators} = await getAndInitDevValidators({ - logPrefix: "mergemock", - node: bn, - validatorsPerClient, - validatorClientCount, - startIndex: 0, - // At least one sim test must use the REST API for beacon <-> validator comms - useRestApi: true, - testLoggerOpts, - valProposerConfig, - }); - - afterEachCallbacks.push(async () => { - await Promise.all(validators.map((v) => v.close())); - }); - - let engineBlocks = 0; - let builderBlocks = 0; - await new Promise((resolve, _reject) => { - bn.chain.emitter.on(routes.events.EventType.block, async (blockData) => { - const {data: fullOrBlindedBlock} = (await bn.api.beacon.getBlockV2({blockId: blockData.block})) as { - data: SignedBeaconBlock; - }; - if (fullOrBlindedBlock !== undefined) { - const blockFeeRecipient = toHexString( - (fullOrBlindedBlock as bellatrix.SignedBeaconBlock).message.body.executionPayload.feeRecipient - ); - if (blockFeeRecipient === feeRecipientMevBoost) { - builderBlocks++; - } else { - engineBlocks++; - } - } - }); - bn.chain.clock.on(ClockEvent.epoch, (epoch) => { - // Resolve only if the finalized checkpoint includes execution payload - if (epoch >= expectedEpochsToFinish) { - console.log("\nGot event epoch, stopping validators and nodes\n"); - resolve(); - } - }); - }); - - // Stop chain and un-subscribe events so the execution engine won't update it's head - // Allow some time to broadcast finalized events and complete the importBlock routine - await Promise.all(validators.map((v) => v.close())); - await bn.close(); - await sleep(500); - - if (bn.chain.beaconProposerCache.get(1) !== feeRecipientEngine) { - throw Error("Invalid feeRecipient set at BN"); - } - - // Assertions to make sure the end state is good - // 1. The proper head is set - const rpc = new Eth1Provider({DEPOSIT_CONTRACT_ADDRESS: ZERO_HASH}, {providerUrls: [engineRpcUrl], jwtSecretHex}); - const consensusHead = bn.chain.forkChoice.getHead(); - const executionHeadBlock = await rpc.getBlockByNumber("latest"); - - if (!executionHeadBlock) throw Error("Execution has not head block"); - if (consensusHead.executionPayloadBlockHash !== executionHeadBlock.hash) { - throw Error( - "Consensus head not equal to execution head: " + - JSON.stringify({ - executionHeadBlockHash: executionHeadBlock.hash, - consensusHeadExecutionPayloadBlockHash: consensusHead.executionPayloadBlockHash, - consensusHeadSlot: consensusHead.slot, - }) - ); - } - - // 2. 
builder blocks are as expected - if (builderBlocks < expectedBuilderBlocks) { - throw Error(`Incorrect builderBlocks=${builderBlocks} (expected=${expectedBuilderBlocks})`); - } - - // 3. engine blocks do not exceed max limit - if (engineBlocks > maximumEngineBlocks) { - throw Error(`Incorrect engineBlocks=${engineBlocks} (limit=${maximumEngineBlocks})`); - } - - // wait for 1 slot to print current epoch stats - await sleep(1 * bn.config.SLOT_DURATION_MS); - stopInfoTracker(); - console.log("\n\nDone\n\n"); - } -}); diff --git a/packages/beacon-node/test/spec/presets/fork_choice.test.ts b/packages/beacon-node/test/spec/presets/fork_choice.test.ts index a10ddeb438..33bfb100c7 100644 --- a/packages/beacon-node/test/spec/presets/fork_choice.test.ts +++ b/packages/beacon-node/test/spec/presets/fork_choice.test.ts @@ -21,7 +21,6 @@ import { BeaconBlock, RootHex, SignedBeaconBlock, - bellatrix, deneb, fulu, ssz, @@ -40,7 +39,6 @@ import {defaultChainOptions} from "../../../src/chain/options.js"; import {validateBlockDataColumnSidecars} from "../../../src/chain/validation/dataColumnSidecar.js"; import {ZERO_HASH_HEX} from "../../../src/constants/constants.js"; import {Eth1ForBlockProductionDisabled} from "../../../src/eth1/index.js"; -import {PowMergeBlock} from "../../../src/eth1/interface.js"; import {ExecutionPayloadStatus} from "../../../src/execution/engine/interface.js"; import {ExecutionEngineMockBackend} from "../../../src/execution/engine/mock.js"; import {getExecutionEngineFromBackend} from "../../../src/execution/index.js"; @@ -61,7 +59,6 @@ const ANCHOR_BLOCK_FILE_NAME = "anchor_block"; const BLOCK_FILE_NAME = "^(block)_([0-9a-zA-Z]+)$"; const BLOBS_FILE_NAME = "^(blobs)_([0-9a-zA-Z]+)$"; const COLUMN_FILE_NAME = "^(column)_([0-9a-zA-Z]+)$"; -const POW_BLOCK_FILE_NAME = "^(pow_block)_([0-9a-zA-Z]+)$"; const ATTESTATION_FILE_NAME = "^(attestation)_([0-9a-zA-Z])+$"; const ATTESTER_SLASHING_FILE_NAME = "^(attester_slashing)_([0-9a-zA-Z])+$"; @@ -80,7 +77,7 @@ const forkChoiceTest = /** This is to track test's tickTime to be used in proposer boost */ let tickTime = 0; const clock = new ClockStopped(currentSlot); - const eth1 = new Eth1ForBlockProductionMock(); + const eth1 = new Eth1ForBlockProductionDisabled(); const executionEngineBackend = new ExecutionEngineMockBackend({ onlyPredefinedResponses: opts.onlyPredefinedResponses, genesisBlockHash: isExecutionStateType(anchorState) @@ -333,23 +330,6 @@ const forkChoiceTest = } } - // **on_merge_block execution** - // Adds PowBlock data which is required for executing on_block(store, block). - // The file is located in the same folder (see below). PowBlocks should be used as return values for - // get_pow_block(hash: Hash32) -> PowBlock function if hashes match. - else if (isPowBlock(step)) { - const powBlock = testcase.powBlocks.get(step.pow_block); - if (!powBlock) throw Error(`pow_block ${step.pow_block} not found`); - logger.debug(`Step ${i}/${stepsLen} pow_block`, { - blockHash: toHexString(powBlock.blockHash), - parentHash: toHexString(powBlock.parentHash), - }); - // Register PowBlock for `get_pow_block(hash: Hash32)` calls in verifyBlock - eth1.addPowBlock(powBlock); - // Register PowBlock to allow validation in execution engine - executionEngineBackend.addPowBlock(powBlock); - } - // Optional step for optimistic sync tests. 
else if (isOnPayloadInfoStep(step)) { logger.debug(`Step ${i}/${stepsLen} payload_status`, {blockHash: step.block_hash}); @@ -455,7 +435,6 @@ const forkChoiceTest = [BLOCK_FILE_NAME]: ssz[fork].SignedBeaconBlock, [BLOBS_FILE_NAME]: ssz.deneb.Blobs, [COLUMN_FILE_NAME]: ssz.fulu.DataColumnSidecar, - [POW_BLOCK_FILE_NAME]: ssz.bellatrix.PowBlock, [ATTESTATION_FILE_NAME]: sszTypesFor(fork).Attestation, [ATTESTER_SLASHING_FILE_NAME]: sszTypesFor(fork).AttesterSlashing, }, @@ -464,7 +443,6 @@ const forkChoiceTest = const blocks = new Map(); const blobs = new Map(); const columns = new Map(); - const powBlocks = new Map(); const attestations = new Map(); const attesterSlashings = new Map(); for (const key in t) { @@ -482,10 +460,6 @@ const forkChoiceTest = if (columnMatch) { columns.set(key, t[key]); } - const powBlockMatch = key.match(POW_BLOCK_FILE_NAME); - if (powBlockMatch) { - powBlocks.set(key, t[key]); - } const attMatch = key.match(ATTESTATION_FILE_NAME); if (attMatch) { attestations.set(key, t[key]); @@ -503,7 +477,6 @@ const forkChoiceTest = blocks, blobs, columns, - powBlocks, attestations, attesterSlashings, }; @@ -530,7 +503,7 @@ function toSpecTestCheckpoint(checkpoint: CheckpointWithHex): SpecTestCheckpoint }; } -type Step = OnTick | OnAttestation | OnAttesterSlashing | OnBlock | OnPowBlock | OnPayloadInfo | Checks; +type Step = OnTick | OnAttestation | OnAttesterSlashing | OnBlock | OnPayloadInfo | Checks; type SpecTestCheckpoint = {epoch: bigint; root: string}; @@ -571,15 +544,6 @@ type OnBlock = { valid?: number; }; -/** Optional step for optimistic sync tests. */ -type OnPowBlock = { - /** - * the name of the `pow_block_<32-byte-root>.ssz_snappy` file. To - * execute `on_pow_block(store, block)` - */ - pow_block: string; -}; - type OnPayloadInfo = { /** Encoded 32-byte value of payload's block hash. */ block_hash: string; @@ -622,7 +586,6 @@ type ForkChoiceTestCase = { blocks: Map; blobs: Map; columns: Map; - powBlocks: Map; attestations: Map; attesterSlashings: Map; }; @@ -643,10 +606,6 @@ function isBlock(step: Step): step is OnBlock { return typeof (step as OnBlock).block === "string"; } -function isPowBlock(step: Step): step is OnPowBlock { - return typeof (step as OnPowBlock).pow_block === "string"; -} - function isOnPayloadInfoStep(step: Step): step is OnPayloadInfo { return typeof (step as OnPayloadInfo).block_hash === "string"; } @@ -655,25 +614,6 @@ function isCheck(step: Step): step is Checks { return typeof (step as Checks).checks === "object"; } -// Extend Eth1ForBlockProductionDisabled to not have to re-implement new methods -class Eth1ForBlockProductionMock extends Eth1ForBlockProductionDisabled { - private items = new Map(); - - async getPowBlock(powBlockHash: string): Promise { - return this.items.get(powBlockHash) ?? 
null; - } - - addPowBlock(powBlock: bellatrix.PowBlock): void { - this.items.set(toHexString(powBlock.blockHash), { - // not used by verifyBlock() - number: 0, - blockHash: toHexString(powBlock.blockHash), - parentHash: toHexString(powBlock.parentHash), - totalDifficulty: powBlock.totalDifficulty, - }); - } -} - specTestIterator(path.join(ethereumConsensusSpecsTests.outputDir, "tests", ACTIVE_PRESET), { fork_choice: {type: RunnerType.default, fn: forkChoiceTest({onlyPredefinedResponses: false})}, sync: {type: RunnerType.default, fn: forkChoiceTest({onlyPredefinedResponses: true})}, diff --git a/packages/beacon-node/test/spec/utils/specTestIterator.ts b/packages/beacon-node/test/spec/utils/specTestIterator.ts index b83d3726e1..428b6602d4 100644 --- a/packages/beacon-node/test/spec/utils/specTestIterator.ts +++ b/packages/beacon-node/test/spec/utils/specTestIterator.ts @@ -60,10 +60,13 @@ const coveredTestRunners = [ // ``` export const defaultSkipOpts: SkipOpts = { skippedForks: ["eip7805"], - // TODO: capella - // BeaconBlockBody proof in lightclient is the new addition in v1.3.0-rc.2-hotfix - // Skip them for now to enable subsequently skippedTestSuites: [ + // Merge transition tests are skipped because we no longer support performing the merge transition. + // All networks have already completed the merge, so this code path is no longer needed. + /^bellatrix\/fork_choice\/on_merge_block\/.*/, + // TODO: capella + // BeaconBlockBody proof in lightclient is the new addition in v1.3.0-rc.2-hotfix + // Skip them for now to enable subsequently /^capella\/light_client\/single_merkle_proof\/BeaconBlockBody.*/, /^deneb\/light_client\/single_merkle_proof\/BeaconBlockBody.*/, /^electra\/light_client\/single_merkle_proof\/BeaconBlockBody.*/, @@ -72,7 +75,12 @@ export const defaultSkipOpts: SkipOpts = { /^gloas\/(finality|fork_choice|networking|sanity|transition)\/.*$/, /^gloas\/ssz_static\/ForkChoiceNode.*$/, ], - skippedTests: [], + skippedTests: [ + // These tests validate "first payload" scenarios where is_execution_enabled was false pre-merge. + // Since we removed merge transition support, these code paths no longer exist. 
+ /^bellatrix\/operations\/execution_payload\/.+\/bad_parent_hash_first_payload$/, + /^bellatrix\/sanity\/blocks\/.+\/is_execution_enabled_false$/, + ], skippedRunners: [], }; diff --git a/packages/beacon-node/test/unit/eth1/eth1MergeBlockTracker.test.ts b/packages/beacon-node/test/unit/eth1/eth1MergeBlockTracker.test.ts deleted file mode 100644 index 5f19c5c4f5..0000000000 --- a/packages/beacon-node/test/unit/eth1/eth1MergeBlockTracker.test.ts +++ /dev/null @@ -1,256 +0,0 @@ -import {afterEach, beforeEach, describe, expect, it} from "vitest"; -import {toHexString} from "@chainsafe/ssz"; -import {ChainConfig} from "@lodestar/config"; -import {sleep} from "@lodestar/utils"; -import {ZERO_HASH} from "../../../src/constants/index.js"; -import {Eth1MergeBlockTracker, StatusCode, toPowBlock} from "../../../src/eth1/eth1MergeBlockTracker.js"; -import {Eth1ProviderState, EthJsonRpcBlockRaw} from "../../../src/eth1/interface.js"; -import {IEth1Provider} from "../../../src/index.js"; -import {testLogger} from "../../utils/logger.js"; - -describe("eth1 / Eth1MergeBlockTracker", () => { - const logger = testLogger(); - - const terminalTotalDifficulty = 1000; - let config: ChainConfig; - let controller: AbortController; - beforeEach(() => { - controller = new AbortController(); - - config = { - // Set time units to 0 to make the test as fast as possible - SECONDS_PER_ETH1_BLOCK: 0, - SLOT_DURATION_MS: 0, - // Hardcode TTD to a low value - TERMINAL_TOTAL_DIFFICULTY: BigInt(terminalTotalDifficulty), - TERMINAL_BLOCK_HASH: ZERO_HASH, - } as Partial as ChainConfig; - }); - - afterEach(() => controller.abort()); - - it("Should find terminal pow block through TERMINAL_BLOCK_HASH", async () => { - config.TERMINAL_BLOCK_HASH = Buffer.alloc(32, 1); - const block: EthJsonRpcBlockRaw = { - number: toHex(10), - hash: toRootHex(11), - parentHash: toRootHex(10), - totalDifficulty: toHex(100), - timestamp: "0x0", - }; - const terminalPowBlock = toPowBlock(block); - const eth1Provider: IEth1Provider = { - deployBlock: 0, - getBlockNumber: async () => 0, - getBlockByNumber: async () => { - throw Error("Not implemented"); - }, - getBlockByHash: async (blockHashHex): Promise => { - return blockHashHex === toHexString(config.TERMINAL_BLOCK_HASH) ? block : null; - }, - getBlocksByNumber: async (): Promise => { - throw Error("Not implemented"); - }, - getDepositEvents: async (): Promise => { - throw Error("Not implemented"); - }, - validateContract: async (): Promise => { - throw Error("Not implemented"); - }, - getState: () => Eth1ProviderState.ONLINE, - }; - - const eth1MergeBlockTracker = new Eth1MergeBlockTracker( - { - config, - logger, - signal: controller.signal, - metrics: null, - }, - eth1Provider - ); - eth1MergeBlockTracker.startPollingMergeBlock(); - - // Wait for Eth1MergeBlockTracker to find at least one merge block - while (!controller.signal.aborted) { - if (await eth1MergeBlockTracker.getTerminalPowBlock()) break; - await sleep(10, controller.signal); - } - - // Status should acknowlege merge block is found - expect(eth1MergeBlockTracker["status"].code).toBe(StatusCode.FOUND); - - // Given the total difficulty offset the block that has TTD is the `difficultyOffset`nth block - expect(await eth1MergeBlockTracker.getTerminalPowBlock()).toEqual(terminalPowBlock); - }); - - it("Should find terminal pow block polling future 'latest' blocks", async () => { - // Set current network totalDifficulty to behind terminalTotalDifficulty by 5. 
- // Then on each call to getBlockByNumber("latest") increase totalDifficulty by 1. - const numOfBlocks = 5; - const difficulty = 1; - - let latestBlockPointer = 0; - - const blocks: EthJsonRpcBlockRaw[] = []; - const blocksByHash = new Map(); - - for (let i = 0; i < numOfBlocks + 1; i++) { - const block: EthJsonRpcBlockRaw = { - number: toHex(i), - hash: toRootHex(i + 1), - parentHash: toRootHex(i), - // Latest block is under TTD, so past block search is stopped - totalDifficulty: toHex(terminalTotalDifficulty - numOfBlocks * difficulty + i * difficulty), - timestamp: "0x0", - }; - blocks.push(block); - } - - const eth1Provider: IEth1Provider = { - deployBlock: 0, - getBlockNumber: async () => 0, - getBlockByNumber: async (blockNumber) => { - // On each call simulate that the eth1 chain advances 1 block with +1 totalDifficulty - if (blockNumber === "latest") { - if (latestBlockPointer >= blocks.length) { - throw Error("Fetched too many blocks"); - } - return blocks[latestBlockPointer++]; - } - return blocks[blockNumber]; - }, - getBlockByHash: async (blockHashHex) => blocksByHash.get(blockHashHex) ?? null, - getBlocksByNumber: async (): Promise => { - throw Error("Not implemented"); - }, - getDepositEvents: async (): Promise => { - throw Error("Not implemented"); - }, - validateContract: async (): Promise => { - throw Error("Not implemented"); - }, - getState: () => Eth1ProviderState.ONLINE, - }; - - await runFindMergeBlockTest(eth1Provider, blocks.at(-1) as EthJsonRpcBlockRaw); - }); - - it("Should find terminal pow block fetching past blocks", async () => { - // Set current network totalDifficulty to behind terminalTotalDifficulty by 5. - // Then on each call to getBlockByNumber("latest") increase totalDifficulty by 1. - - const numOfBlocks = 5; - const difficulty = 1; - const ttdOffset = 1 * difficulty; - const hashOffset = 100; - const blocks: EthJsonRpcBlockRaw[] = []; - - for (let i = 0; i < numOfBlocks * 2; i++) { - const block: EthJsonRpcBlockRaw = { - number: toHex(hashOffset + i), - hash: toRootHex(hashOffset + i + 1), - parentHash: toRootHex(hashOffset + i), - // Latest block is under TTD, so past block search is stopped - totalDifficulty: toHex(terminalTotalDifficulty + i * difficulty - ttdOffset), - timestamp: "0x0", - }; - blocks.push(block); - } - - // Before last block (with ttdOffset = 1) is the merge block - const expectedMergeBlock = blocks[ttdOffset]; - - const eth1Provider = mockEth1ProviderFromBlocks(blocks); - await runFindMergeBlockTest(eth1Provider, expectedMergeBlock); - }); - - it("Should find terminal pow block fetching past blocks till genesis", async () => { - // There's no block with TD < TTD, searcher should stop at genesis block - - const numOfBlocks = 5; - const difficulty = 1; - const blocks: EthJsonRpcBlockRaw[] = []; - - for (let i = 0; i < numOfBlocks * 2; i++) { - const block: EthJsonRpcBlockRaw = { - number: toHex(i), - hash: toRootHex(i + 1), - parentHash: toRootHex(i), - // Latest block is under TTD, so past block search is stopped - totalDifficulty: toHex(terminalTotalDifficulty + i * difficulty + 1), - timestamp: "0x0", - }; - blocks.push(block); - } - - // Merge block must be genesis block - const expectedMergeBlock = blocks[0]; - - const eth1Provider = mockEth1ProviderFromBlocks(blocks); - await runFindMergeBlockTest(eth1Provider, expectedMergeBlock); - }); - - function mockEth1ProviderFromBlocks(blocks: EthJsonRpcBlockRaw[]): IEth1Provider { - const blocksByHash = new Map(); - - for (const block of blocks) { - blocksByHash.set(block.hash, 
block); - } - - return { - deployBlock: 0, - getBlockNumber: async () => 0, - getBlockByNumber: async (blockNumber) => { - // Always return the same block with totalDifficulty > TTD and unknown parent - if (blockNumber === "latest") return blocks.at(-1) as EthJsonRpcBlockRaw; - return blocks[blockNumber]; - }, - getBlockByHash: async (blockHashHex) => blocksByHash.get(blockHashHex) ?? null, - getBlocksByNumber: async (from, to) => blocks.slice(from, to), - getDepositEvents: async (): Promise => { - throw Error("Not implemented"); - }, - validateContract: async (): Promise => { - throw Error("Not implemented"); - }, - getState: () => Eth1ProviderState.ONLINE, - }; - } - - async function runFindMergeBlockTest( - eth1Provider: IEth1Provider, - expectedMergeBlock: EthJsonRpcBlockRaw - ): Promise { - const eth1MergeBlockTracker = new Eth1MergeBlockTracker( - { - config, - logger, - signal: controller.signal, - metrics: null, - }, - eth1Provider - ); - eth1MergeBlockTracker.startPollingMergeBlock(); - - // Wait for Eth1MergeBlockTracker to find at least one merge block - while (!controller.signal.aborted) { - if (await eth1MergeBlockTracker.getTerminalPowBlock()) break; - await sleep(10, controller.signal); - } - - // Status should acknowlege merge block is found - expect(eth1MergeBlockTracker["status"].code).toBe(StatusCode.FOUND); - - // Given the total difficulty offset the block that has TTD is the `difficultyOffset`nth block - expect(await eth1MergeBlockTracker.getTerminalPowBlock()).toEqual(toPowBlock(expectedMergeBlock)); - } -}); - -function toHex(num: number | bigint): string { - return "0x" + num.toString(16); -} - -function toRootHex(num: number): string { - return "0x" + num.toString(16).padStart(64, "0"); -} diff --git a/packages/beacon-node/test/utils/networkWithMockDb.ts b/packages/beacon-node/test/utils/networkWithMockDb.ts index 5572a7cba9..5194713fd3 100644 --- a/packages/beacon-node/test/utils/networkWithMockDb.ts +++ b/packages/beacon-node/test/utils/networkWithMockDb.ts @@ -48,7 +48,6 @@ export async function getNetworkForTest( const chain = new BeaconChain( { - safeSlotsToImportOptimistically: 0, archiveStateEpochFrequency: 0, suggestedFeeRecipient: "", blsVerifyAllMainThread: true, diff --git a/packages/beacon-node/test/utils/runEl.ts b/packages/beacon-node/test/utils/runEl.ts index ae5ad992ae..1614a899b0 100644 --- a/packages/beacon-node/test/utils/runEl.ts +++ b/packages/beacon-node/test/utils/runEl.ts @@ -9,16 +9,10 @@ import {shell} from "../sim/shell.js"; let txRpcId = 1; -export enum ELStartMode { - PreMerge = "pre-merge", - PostMerge = "post-merge", -} - -export type ELSetupConfig = {mode: ELStartMode; elScriptDir: string; elBinaryDir: string; genesisTemplate?: string}; +export type ELSetupConfig = {elScriptDir: string; elBinaryDir: string; genesisTemplate?: string}; export type ELRunOptions = {ttd: bigint; dataPath: string; jwtSecretHex: string; enginePort: number; ethPort: number}; export type ELClient = { genesisBlockHash: string; - ttd: bigint; engineRpcUrl: string; ethRpcUrl: string; network: string; @@ -26,7 +20,7 @@ export type ELClient = { }; /** - * A util function to start an EL in a "pre-merge" or "post-merge" mode using an `elScriptDir` setup + * A util function to start an EL using an `elScriptDir` setup * scripts folder in packages/beacon-node/test/scripts/el-interop. 
* * Returns an ELRunConfig after starting the EL, which can be used to initialize the genesis @@ -34,11 +28,11 @@ export type ELClient = { */ export async function runEL( - {mode, elScriptDir, elBinaryDir, genesisTemplate: template}: ELSetupConfig, + {elScriptDir, elBinaryDir, genesisTemplate: template}: ELSetupConfig, {ttd, dataPath, jwtSecretHex, enginePort, ethPort}: ELRunOptions, signal: AbortSignal ): Promise<{elClient: ELClient; tearDownCallBack: () => Promise}> { - const network = `${elScriptDir}/${mode}`; + const network = `${elScriptDir}`; const ethRpcUrl = `http://127.0.0.1:${ethPort}`; const engineRpcUrl = `http://127.0.0.1:${enginePort}`; const genesisTemplate = template ?? "genesisPre.tmpl"; diff --git a/packages/cli/src/config/beaconParams.ts b/packages/cli/src/config/beaconParams.ts index 7abb0e9a55..db01dfc57f 100644 --- a/packages/cli/src/config/beaconParams.ts +++ b/packages/cli/src/config/beaconParams.ts @@ -6,13 +6,7 @@ import { createChainForkConfig, } from "@lodestar/config"; import {NetworkName, getNetworkBeaconParams} from "../networks/index.js"; -import { - GlobalArgs, - ITerminalPowArgs, - defaultNetwork, - parseBeaconParamsArgs, - parseTerminalPowArgs, -} from "../options/index.js"; +import {GlobalArgs, defaultNetwork, parseBeaconParamsArgs} from "../options/index.js"; import {readFile} from "../util/index.js"; import {IBeaconParamsUnparsed} from "./types.js"; @@ -44,7 +38,6 @@ export function getBeaconParamsFromArgs(args: GlobalArgs): ChainConfig { paramsFile: args.paramsFile, additionalParamsCli: { ...parseBeaconParamsArgs(args as IBeaconParamsUnparsed), - ...parseTerminalPowArgs(args as ITerminalPowArgs), }, }); } diff --git a/packages/cli/src/options/beaconNodeOptions/chain.ts b/packages/cli/src/options/beaconNodeOptions/chain.ts index 9987883a63..04af60b908 100644 --- a/packages/cli/src/options/beaconNodeOptions/chain.ts +++ b/packages/cli/src/options/beaconNodeOptions/chain.ts @@ -23,7 +23,6 @@ export type ChainArgs = { "chain.computeUnrealized"?: boolean; "chain.assertCorrectProgressiveBalances"?: boolean; "chain.maxSkipSlots"?: number; - "safe-slots-to-import-optimistically": number; emitPayloadAttributes?: boolean; broadcastValidationStrictness?: string; "chain.minSameMessageSignatureSetsToBatch"?: number; @@ -63,7 +62,6 @@ export function parseArgs(args: ChainArgs): IBeaconNodeOptions["chain"] { computeUnrealized: args["chain.computeUnrealized"], assertCorrectProgressiveBalances: args["chain.assertCorrectProgressiveBalances"], maxSkipSlots: args["chain.maxSkipSlots"], - safeSlotsToImportOptimistically: args["safe-slots-to-import-optimistically"], emitPayloadAttributes: args.emitPayloadAttributes, broadcastValidationStrictness: args.broadcastValidationStrictness, minSameMessageSignatureSetsToBatch: @@ -227,15 +225,6 @@ Will double processing times. 
Use only for debugging purposes.", group: "chain", }, - "safe-slots-to-import-optimistically": { - hidden: true, - type: "number", - description: - "Slots from current (clock) slot till which its safe to import a block optimistically if the merge is not justified yet.", - default: defaultOptions.chain.safeSlotsToImportOptimistically, - group: "chain", - }, - "chain.archiveStateEpochFrequency": { description: "Minimum number of epochs between archived states", default: defaultOptions.chain.archiveStateEpochFrequency, diff --git a/packages/cli/src/options/paramsOptions.ts b/packages/cli/src/options/paramsOptions.ts index bd89ffa77d..787cbdaae1 100644 --- a/packages/cli/src/options/paramsOptions.ts +++ b/packages/cli/src/options/paramsOptions.ts @@ -1,4 +1,4 @@ -import {ChainConfig, chainConfigTypes} from "@lodestar/config"; +import {chainConfigTypes} from "@lodestar/config"; import {CliCommandOptions, CliOptionDefinition} from "@lodestar/utils"; import {IBeaconParamsUnparsed} from "../config/types.js"; import {ObjectKeys} from "../util/index.js"; @@ -7,12 +7,7 @@ import {ObjectKeys} from "../util/index.js"; // If an arbitrary key notation is used, it removes type safety on most of this CLI arg parsing code. // Params will be parsed from an args object assuming to contain the required keys -export type ITerminalPowArgs = { - "terminal-total-difficulty-override"?: string; - "terminal-block-hash-override"?: string; - "terminal-block-hash-epoch-override"?: string; -}; -export type IParamsArgs = Record & ITerminalPowArgs; +export type IParamsArgs = Record; const getArgKey = (key: keyof IBeaconParamsUnparsed): string => `params.${key}`; @@ -24,7 +19,7 @@ export function parseBeaconParamsArgs(args: Record): IB }, {}); } -const paramsOptionsByName = ObjectKeys(chainConfigTypes).reduce( +export const paramsOptions: CliCommandOptions = ObjectKeys(chainConfigTypes).reduce( (options: Record, key): Record => { options[getArgKey(key)] = { hidden: true, @@ -35,38 +30,3 @@ const paramsOptionsByName = ObjectKeys(chainConfigTypes).reduce( }, {} ); - -const terminalArgsToParamsMap: {[K in keyof ITerminalPowArgs]: keyof ChainConfig} = { - "terminal-total-difficulty-override": "TERMINAL_TOTAL_DIFFICULTY", - "terminal-block-hash-override": "TERMINAL_BLOCK_HASH", - "terminal-block-hash-epoch-override": "TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH", -}; - -export function parseTerminalPowArgs(args: ITerminalPowArgs): IBeaconParamsUnparsed { - const parsedArgs = ObjectKeys(terminalArgsToParamsMap).reduce((beaconParams: Partial, key) => { - const paramOption = terminalArgsToParamsMap[key]; - const value = args[key]; - if (paramOption != null && value != null) beaconParams[paramOption] = value; - return beaconParams; - }, {}); - return parsedArgs; -} - -export const paramsOptions: CliCommandOptions = { - ...paramsOptionsByName, - - "terminal-total-difficulty-override": { - description: "Terminal PoW block TTD override", - type: "string", - }, - - "terminal-block-hash-override": { - description: "Terminal PoW block hash override", - type: "string", - }, - - "terminal-block-hash-epoch-override": { - description: "Terminal PoW block hash override activation epoch", - type: "string", - }, -}; diff --git a/packages/cli/test/sim/multiFork.test.ts b/packages/cli/test/sim/multiFork.test.ts index ce227db848..6974fe135b 100644 --- a/packages/cli/test/sim/multiFork.test.ts +++ b/packages/cli/test/sim/multiFork.test.ts @@ -2,7 +2,6 @@ import path from "node:path"; import {createAccountBalanceAssertion} from 
"../utils/crucible/assertions/accountBalanceAssertion.js"; import {createExecutionHeadAssertion} from "../utils/crucible/assertions/executionHeadAssertion.js"; import {createForkAssertion} from "../utils/crucible/assertions/forkAssertion.js"; -import {mergeAssertion} from "../utils/crucible/assertions/mergeAssertion.js"; import {nodeAssertion} from "../utils/crucible/assertions/nodeAssertion.js"; import {createWithdrawalAssertions} from "../utils/crucible/assertions/withdrawalsAssertion.js"; import {BeaconClient, ExecutionClient, Match, ValidatorClient} from "../utils/crucible/interfaces.js"; @@ -116,14 +115,6 @@ env.tracker.register({ }, }); -env.tracker.register({ - ...mergeAssertion, - match: ({slot}) => { - // Check at the end of bellatrix fork, merge should happen by then - return slot === env.clock.getLastSlotOfEpoch(bellatrixForkEpoch) ? Match.Assert | Match.Remove : Match.None; - }, -}); - env.tracker.register( createAccountBalanceAssertion({ address: "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", diff --git a/packages/cli/test/unit/options/beaconNodeOptions.test.ts b/packages/cli/test/unit/options/beaconNodeOptions.test.ts index ac02498621..c32a35132f 100644 --- a/packages/cli/test/unit/options/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/options/beaconNodeOptions.test.ts @@ -33,7 +33,6 @@ describe("options / beaconNodeOptions", () => { suggestedFeeRecipient: "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "chain.assertCorrectProgressiveBalances": true, "chain.maxSkipSlots": 100, - "safe-slots-to-import-optimistically": 256, "chain.archiveStateEpochFrequency": 1024, "chain.minSameMessageSignatureSetsToBatch": 32, "chain.maxShufflingCacheEpochs": 100, @@ -139,7 +138,6 @@ describe("options / beaconNodeOptions", () => { preaggregateSlotDistance: 1, attDataCacheSlotDistance: 2, computeUnrealized: true, - safeSlotsToImportOptimistically: 256, suggestedFeeRecipient: "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", assertCorrectProgressiveBalances: true, maxSkipSlots: 100, diff --git a/packages/cli/test/utils/crucible/assertions/mergeAssertion.ts b/packages/cli/test/utils/crucible/assertions/mergeAssertion.ts deleted file mode 100644 index 6e0c1a63d7..0000000000 --- a/packages/cli/test/utils/crucible/assertions/mergeAssertion.ts +++ /dev/null @@ -1,21 +0,0 @@ -import {BeaconStateAllForks, isExecutionStateType, isMergeTransitionComplete} from "@lodestar/state-transition"; -import {Assertion, AssertionResult} from "../interfaces.js"; -import {neverMatcher} from "./matchers.js"; - -export const mergeAssertion: Assertion<"merge", string> = { - id: "merge", - // Include into particular test with custom condition - match: neverMatcher, - async assert({node}) { - const errors: AssertionResult[] = []; - - const res = await node.beacon.api.debug.getStateV2({stateId: "head"}); - const state = res.value() as unknown as BeaconStateAllForks; - - if (!(isExecutionStateType(state) && isMergeTransitionComplete(state))) { - errors.push("Node has not yet completed the merged transition"); - } - - return errors; - }, -}; diff --git a/packages/cli/test/utils/crucible/clients/execution/geth.ts b/packages/cli/test/utils/crucible/clients/execution/geth.ts index 6de53f9543..0b59273e32 100644 --- a/packages/cli/test/utils/crucible/clients/execution/geth.ts +++ b/packages/cli/test/utils/crucible/clients/execution/geth.ts @@ -3,7 +3,7 @@ import path from "node:path"; import {Web3} from "web3"; import {fetch} from "@lodestar/utils"; import {EL_GENESIS_PASSWORD, EL_GENESIS_SECRET_KEY, SHARED_JWT_SECRET, 
SIM_ENV_NETWORK_ID} from "../../constants.js"; -import {ExecutionClient, ExecutionNodeGenerator, ExecutionStartMode, JobOptions, RunnerType} from "../../interfaces.js"; +import {ExecutionClient, ExecutionNodeGenerator, JobOptions, RunnerType} from "../../interfaces.js"; import {getNodeMountedPaths} from "../../utils/paths.js"; import {getNodePorts} from "../../utils/ports.js"; import {registerWeb3JsPlugins} from "../../web3js/plugins/index.js"; @@ -13,7 +13,7 @@ export const generateGethNode: ExecutionNodeGenerator = (o throw new Error("GETH_BINARY_DIR or GETH_DOCKER_IMAGE must be provided"); } - const {id, mode, ttd, address, mining, clientOptions, nodeIndex} = opts; + const {id, ttd, address, mining, clientOptions, nodeIndex} = opts; const ports = getNodePorts(nodeIndex); const isDocker = !!process.env.GETH_DOCKER_IMAGE; @@ -136,7 +136,6 @@ export const generateGethNode: ExecutionNodeGenerator = (o "--verbosity", "5", ...(mining ? ["--mine"] : []), - ...(mode === ExecutionStartMode.PreMerge ? ["--nodiscover"] : []), ...clientOptions, ], env: {}, diff --git a/packages/cli/test/utils/crucible/clients/execution/index.ts b/packages/cli/test/utils/crucible/clients/execution/index.ts index 14ba093ed6..b40a0a1d29 100644 --- a/packages/cli/test/utils/crucible/clients/execution/index.ts +++ b/packages/cli/test/utils/crucible/clients/execution/index.ts @@ -6,7 +6,6 @@ import { ExecutionGeneratorOptions, ExecutionGenesisOptions, ExecutionNode, - ExecutionStartMode, } from "../../interfaces.js"; import {getGethGenesisBlock} from "../../utils/executionGenesis.js"; import {getEstimatedForkTime} from "../../utils/index.js"; @@ -57,16 +56,13 @@ export async function createExecutionNode( ...options, ...genesisOptions, id: elId, - mode: - options.mode ?? - (forkConfig.BELLATRIX_FORK_EPOCH > 0 ? ExecutionStartMode.PreMerge : ExecutionStartMode.PostMerge), address: runner.getNextIp(), mining: options.mining ?? 
false, }; await ensureDirectories(opts.paths); await writeFile(opts.paths.jwtsecretFilePath, SHARED_JWT_SECRET); - await writeFile(opts.paths.genesisFilePath, JSON.stringify(getGethGenesisBlock(opts.mode, genesisOptions))); + await writeFile(opts.paths.genesisFilePath, JSON.stringify(getGethGenesisBlock(genesisOptions))); switch (client) { case ExecutionClient.Mock: { diff --git a/packages/cli/test/utils/crucible/clients/execution/nethermind.ts b/packages/cli/test/utils/crucible/clients/execution/nethermind.ts index 92615aeb4c..7a85584c15 100644 --- a/packages/cli/test/utils/crucible/clients/execution/nethermind.ts +++ b/packages/cli/test/utils/crucible/clients/execution/nethermind.ts @@ -16,7 +16,6 @@ export const generateNethermindNode: ExecutionNodeGenerator; [BeaconClient.Lighthouse]: Record; @@ -137,7 +132,6 @@ export interface ExecutionGenesisOptions extends ExecutionGenesisOptions, GeneratorOptions { - mode: ExecutionStartMode; mining: boolean; paths: ExecutionPaths; clientOptions: ExecutionClientsOptions[E]; diff --git a/packages/cli/test/utils/crucible/utils/executionGenesis.ts b/packages/cli/test/utils/crucible/utils/executionGenesis.ts index 7250c147e2..82c8036c71 100644 --- a/packages/cli/test/utils/crucible/utils/executionGenesis.ts +++ b/packages/cli/test/utils/crucible/utils/executionGenesis.ts @@ -1,10 +1,7 @@ import {SIM_ENV_CHAIN_ID, SIM_ENV_NETWORK_ID} from "../constants.js"; -import {Eth1GenesisBlock, ExecutionGenesisOptions, ExecutionStartMode} from "../interfaces.js"; +import {Eth1GenesisBlock, ExecutionGenesisOptions} from "../interfaces.js"; -export const getGethGenesisBlock = ( - mode: ExecutionStartMode, - options: ExecutionGenesisOptions -): Record => { +export const getGethGenesisBlock = (options: ExecutionGenesisOptions): Record => { const {ttd, cliqueSealingPeriod, shanghaiTime, genesisTime, cancunTime, pragueTime} = options; const genesis = { @@ -63,20 +60,12 @@ export const getGethGenesisBlock = ( baseFeePerGas: "0x0", }; - if (mode === ExecutionStartMode.PreMerge) { - return genesis; - } - - // TODO: Figure out PostMerge genesis later return genesis; }; -export const getNethermindChainSpec = ( - mode: ExecutionStartMode, - options: ExecutionGenesisOptions -): Record => { +export const getNethermindChainSpec = (options: ExecutionGenesisOptions): Record => { const {ttd, shanghaiTime} = options; - const genesis = getGethGenesisBlock(mode, options) as Eth1GenesisBlock; + const genesis = getGethGenesisBlock(options) as Eth1GenesisBlock; return { name: "simulation-dev", diff --git a/packages/config/src/chainConfig/types.ts b/packages/config/src/chainConfig/types.ts index 5cf0d79549..41e33cf84a 100644 --- a/packages/config/src/chainConfig/types.ts +++ b/packages/config/src/chainConfig/types.ts @@ -15,8 +15,11 @@ export type ChainConfig = { CONFIG_NAME: string; // Transition + /** @deprecated All networks have completed the merge transition */ TERMINAL_TOTAL_DIFFICULTY: bigint; + /** @deprecated All networks have completed the merge transition */ TERMINAL_BLOCK_HASH: Uint8Array; + /** @deprecated All networks have completed the merge transition */ TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH: number; // Genesis diff --git a/packages/fork-choice/src/forkChoice/forkChoice.ts b/packages/fork-choice/src/forkChoice/forkChoice.ts index f6c405e4b4..02234e8f73 100644 --- a/packages/fork-choice/src/forkChoice/forkChoice.ts +++ b/packages/fork-choice/src/forkChoice/forkChoice.ts @@ -1,4 +1,4 @@ -import {ChainConfig, ChainForkConfig} from "@lodestar/config"; +import {ChainForkConfig} 
from "@lodestar/config"; import {SLOTS_PER_EPOCH, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; import { CachedBeaconStateAllForks, @@ -10,7 +10,6 @@ import { computeStartSlotAtEpoch, getAttesterSlashableIndices, isExecutionBlockBodyType, - isExecutionEnabled, isExecutionStateType, } from "@lodestar/state-transition"; import {computeUnrealizedCheckpoints} from "@lodestar/state-transition/epoch"; @@ -23,7 +22,6 @@ import { RootHex, Slot, ValidatorIndex, - bellatrix, phase0, ssz, } from "@lodestar/types"; @@ -49,7 +47,6 @@ import { EpochDifference, IForkChoice, NotReorgedReason, - PowBlockHex, ShouldOverrideForkChoiceUpdateResult, } from "./interface.js"; import {CheckpointWithHex, IForkChoiceStore, JustifiedBalances, toCheckpointWithHex} from "./store.js"; @@ -656,16 +653,6 @@ export class ForkChoice implements IForkChoice { this.proposerBoostRoot = blockRootHex; } - // As per specs, we should be validating here the terminal conditions of - // the PoW if this were a merge transition block. - // (https://github.com/ethereum/consensus-specs/blob/dev/specs/bellatrix/fork-choice.md#on_block) - // - // However this check has been moved to the `verifyBlockStateTransition` in - // `packages/beacon-node/src/chain/blocks/verifyBlock.ts` as: - // - // 1. Its prudent to fail fast and not try importing a block in forkChoice. - // 2. Also the data to run such a validation is readily available there. - const justifiedCheckpoint = toCheckpointWithHex(state.currentJustifiedCheckpoint); const finalizedCheckpoint = toCheckpointWithHex(state.finalizedCheckpoint); const stateJustifiedEpoch = justifiedCheckpoint.epoch; @@ -754,7 +741,7 @@ export class ForkChoice implements IForkChoice { unrealizedFinalizedEpoch: unrealizedFinalizedCheckpoint.epoch, unrealizedFinalizedRoot: unrealizedFinalizedCheckpoint.rootHex, - ...(isExecutionBlockBodyType(block.body) && isExecutionStateType(state) && isExecutionEnabled(state, block) + ...(isExecutionBlockBodyType(block.body) && isExecutionStateType(state) ? { executionPayloadBlockHash: toRootHex(block.body.executionPayload.blockHash), executionPayloadNumber: block.body.executionPayload.blockNumber, @@ -1609,65 +1596,6 @@ export class ForkChoice implements IForkChoice { } } -/** - * This function checks the terminal pow conditions on the merge block as - * specified in the config either via TTD or TBH. This function is part of - * forkChoice because if the merge block was previously imported as syncing - * and the EL eventually signals it catching up via validateLatestHash - * the specs mandates validating terminal conditions on the previously - * imported merge block. 
- */ -export function assertValidTerminalPowBlock( - config: ChainConfig, - block: bellatrix.BeaconBlock, - preCachedData: { - executionStatus: ExecutionStatus.Syncing | ExecutionStatus.Valid; - powBlock?: PowBlockHex | null; - powBlockParent?: PowBlockHex | null; - } -): void { - if (!ssz.Root.equals(config.TERMINAL_BLOCK_HASH, ZERO_HASH)) { - if (computeEpochAtSlot(block.slot) < config.TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH) - throw Error(`Terminal block activation epoch ${config.TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH} not reached`); - - // powBock.blockHash is hex, so we just pick the corresponding root - if (!ssz.Root.equals(block.body.executionPayload.parentHash, config.TERMINAL_BLOCK_HASH)) - throw new Error( - `Invalid terminal block hash, expected: ${toRootHex(config.TERMINAL_BLOCK_HASH)}, actual: ${toRootHex( - block.body.executionPayload.parentHash - )}` - ); - } else { - // If no TERMINAL_BLOCK_HASH override, check ttd - - // Delay powBlock checks if the payload execution status is unknown because of - // syncing response in notifyNewPayload call while verifying - if (preCachedData?.executionStatus === ExecutionStatus.Syncing) return; - - const {powBlock, powBlockParent} = preCachedData; - if (!powBlock) throw Error("onBlock preCachedData must include powBlock"); - // if powBlock is genesis don't assert powBlockParent - if (!powBlockParent && powBlock.parentHash !== HEX_ZERO_HASH) - throw Error("onBlock preCachedData must include powBlockParent"); - - const isTotalDifficultyReached = powBlock.totalDifficulty >= config.TERMINAL_TOTAL_DIFFICULTY; - // If we don't have powBlockParent here, powBlock is the genesis and as we would have errored above - // we can mark isParentTotalDifficultyValid as valid - const isParentTotalDifficultyValid = - !powBlockParent || powBlockParent.totalDifficulty < config.TERMINAL_TOTAL_DIFFICULTY; - if (!isTotalDifficultyReached) { - throw Error( - `Invalid terminal POW block: total difficulty not reached expected >= ${config.TERMINAL_TOTAL_DIFFICULTY}, actual = ${powBlock.totalDifficulty}` - ); - } - - if (!isParentTotalDifficultyValid) { - throw Error( - `Invalid terminal POW block parent: expected < ${config.TERMINAL_TOTAL_DIFFICULTY}, actual = ${powBlockParent.totalDifficulty}` - ); - } - } -} // Approximate https://github.com/ethereum/consensus-specs/blob/dev/specs/phase0/fork-choice.md#calculate_committee_fraction // Calculates proposer boost score when committeePercent = config.PROPOSER_SCORE_BOOST export function getCommitteeFraction( diff --git a/packages/fork-choice/src/forkChoice/interface.ts b/packages/fork-choice/src/forkChoice/interface.ts index 51ee5bb28f..32d9b6c68f 100644 --- a/packages/fork-choice/src/forkChoice/interface.ts +++ b/packages/fork-choice/src/forkChoice/interface.ts @@ -246,10 +246,3 @@ export interface IForkChoice { */ getDependentRoot(block: ProtoBlock, atEpochDiff: EpochDifference): RootHex; } - -/** Same to the PowBlock but we want RootHex to work with forkchoice conveniently */ -export type PowBlockHex = { - blockHash: RootHex; - parentHash: RootHex; - totalDifficulty: bigint; -}; diff --git a/packages/fork-choice/src/index.ts b/packages/fork-choice/src/index.ts index 59c7553a19..e476660ee9 100644 --- a/packages/fork-choice/src/index.ts +++ b/packages/fork-choice/src/index.ts @@ -6,14 +6,13 @@ export { type InvalidBlock, InvalidBlockCode, } from "./forkChoice/errors.js"; -export {ForkChoice, type ForkChoiceOpts, UpdateHeadOpt, assertValidTerminalPowBlock} from "./forkChoice/forkChoice.js"; +export {ForkChoice, type 
ForkChoiceOpts, UpdateHeadOpt} from "./forkChoice/forkChoice.js"; export { type AncestorResult, AncestorStatus, EpochDifference, type IForkChoice, NotReorgedReason, - type PowBlockHex, } from "./forkChoice/interface.js"; export * from "./forkChoice/safeBlocks.js"; export { diff --git a/packages/fork-choice/test/unit/forkChoice/utils.test.ts b/packages/fork-choice/test/unit/forkChoice/utils.test.ts deleted file mode 100644 index c029f82eb8..0000000000 --- a/packages/fork-choice/test/unit/forkChoice/utils.test.ts +++ /dev/null @@ -1,71 +0,0 @@ -import {describe, expect, it} from "vitest"; -import {createChainForkConfig} from "@lodestar/config"; -import {ssz} from "@lodestar/types"; -import {ExecutionStatus, assertValidTerminalPowBlock} from "../../../src/index.js"; - -describe("assertValidTerminalPowBlock", () => { - const config = createChainForkConfig({TERMINAL_TOTAL_DIFFICULTY: BigInt(10)}); - const block = ssz.bellatrix.BeaconBlock.defaultValue(); - const executionStatus = ExecutionStatus.Valid; - it("should accept ttd >= genesis block as terminal without powBlockParent", () => { - const powBlock = { - blockHash: "0x" + "ab".repeat(32), - // genesis powBlock will have zero parent hash - parentHash: "0x" + "00".repeat(32), - totalDifficulty: BigInt(10), - }; - expect(() => - assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: null, powBlock}) - ).not.toThrow(); - }); - - it("should require powBlockParent if powBlock not genesis", () => { - const powBlock = { - blockHash: "0x" + "ab".repeat(32), - // genesis powBlock will have non zero parent hash - parentHash: "0x" + "01".repeat(32), - totalDifficulty: BigInt(10), - }; - expect(() => - assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: null, powBlock}) - ).toThrow(); - }); - - it("should require powBlock >= ttd", () => { - const powBlock = { - blockHash: "0x" + "ab".repeat(32), - // genesis powBlock will have non zero parent hash - parentHash: "0x" + "01".repeat(32), - totalDifficulty: BigInt(9), - }; - expect(() => - assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: powBlock, powBlock}) - ).toThrow(); - }); - - it("should require powBlockParent < ttd", () => { - const powBlock = { - blockHash: "0x" + "ab".repeat(32), - // genesis powBlock will have non zero parent hash - parentHash: "0x" + "01".repeat(32), - totalDifficulty: BigInt(10), - }; - expect(() => - assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: powBlock, powBlock}) - ).toThrow(); - }); - - it("should accept powBlockParent < ttd and powBlock >= ttd", () => { - const powBlock = { - blockHash: "0x" + "ab".repeat(32), - // genesis powBlock will have non zero parent hash - parentHash: "0x" + "01".repeat(32), - totalDifficulty: BigInt(10), - }; - const powBlockParent = { - ...powBlock, - totalDifficulty: BigInt(9), - }; - expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent, powBlock})).not.toThrow(); - }); -}); diff --git a/packages/params/src/index.ts b/packages/params/src/index.ts index bcee6d82e1..abecfb0243 100644 --- a/packages/params/src/index.ts +++ b/packages/params/src/index.ts @@ -271,6 +271,7 @@ export const MAX_REQUEST_LIGHT_CLIENT_COMMITTEE_HASHES = 128; /** * Optimistic sync + * @deprecated All networks have completed the merge transition, blocks are always safe to import optimistically. 
*/ export const SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY = 128; /** @deprecated */ diff --git a/packages/state-transition/src/block/index.ts b/packages/state-transition/src/block/index.ts index b2a2ad06f6..a5d67e4c75 100644 --- a/packages/state-transition/src/block/index.ts +++ b/packages/state-transition/src/block/index.ts @@ -7,7 +7,7 @@ import { CachedBeaconStateCapella, CachedBeaconStateGloas, } from "../types.js"; -import {getFullOrBlindedPayload, isExecutionEnabled} from "../util/execution.js"; +import {getFullOrBlindedPayload} from "../util/execution.js"; import {BlockExternalData, DataAvailabilityStatus} from "./externalData.js"; import {processBlobKzgCommitments} from "./processBlobKzgCommitments.js"; import {processBlockHeader} from "./processBlockHeader.js"; @@ -67,11 +67,7 @@ export function processBlock( // The call to the process_execution_payload must happen before the call to the process_randao as the former depends // on the randao_mix computed with the reveal of the previous block. // TODO GLOAS: We call processExecutionPayload somewhere else post-gloas - if ( - fork >= ForkSeq.bellatrix && - fork < ForkSeq.gloas && - isExecutionEnabled(state as CachedBeaconStateBellatrix, block) - ) { + if (fork >= ForkSeq.bellatrix && fork < ForkSeq.gloas) { processExecutionPayload(fork, state as CachedBeaconStateBellatrix, block.body, externalData); } diff --git a/packages/state-transition/src/block/processExecutionPayload.ts b/packages/state-transition/src/block/processExecutionPayload.ts index 0af784074c..65f28822a0 100644 --- a/packages/state-transition/src/block/processExecutionPayload.ts +++ b/packages/state-transition/src/block/processExecutionPayload.ts @@ -3,11 +3,7 @@ import {ForkName, ForkSeq, isForkPostDeneb} from "@lodestar/params"; import {BeaconBlockBody, BlindedBeaconBlockBody, deneb, isExecutionPayload} from "@lodestar/types"; import {toHex, toRootHex} from "@lodestar/utils"; import {CachedBeaconStateBellatrix, CachedBeaconStateCapella} from "../types.js"; -import { - executionPayloadToPayloadHeader, - getFullOrBlindedPayloadFromBody, - isMergeTransitionComplete, -} from "../util/execution.js"; +import {executionPayloadToPayloadHeader, getFullOrBlindedPayloadFromBody} from "../util/execution.js"; import {computeEpochAtSlot, computeTimeAtSlot, getRandaoMix} from "../util/index.js"; import {BlockExternalData, ExecutionPayloadStatus} from "./externalData.js"; @@ -21,15 +17,13 @@ export function processExecutionPayload( const forkName = ForkName[ForkSeq[fork] as ForkName]; // Verify consistency of the parent hash, block number, base fee per gas and gas limit // with respect to the previous execution payload header - if (isMergeTransitionComplete(state)) { - const {latestExecutionPayloadHeader} = state; - if (!byteArrayEquals(payload.parentHash, latestExecutionPayloadHeader.blockHash)) { - throw Error( - `Invalid execution payload parentHash ${toRootHex(payload.parentHash)} latest blockHash ${toRootHex( - latestExecutionPayloadHeader.blockHash - )}` - ); - } + const {latestExecutionPayloadHeader} = state; + if (!byteArrayEquals(payload.parentHash, latestExecutionPayloadHeader.blockHash)) { + throw Error( + `Invalid execution payload parentHash ${toRootHex(payload.parentHash)} latest blockHash ${toRootHex( + latestExecutionPayloadHeader.blockHash + )}` + ); } // Verify random diff --git a/packages/state-transition/src/util/execution.ts b/packages/state-transition/src/util/execution.ts index a5ad0be816..a64f21a086 100644 --- a/packages/state-transition/src/util/execution.ts +++ 
b/packages/state-transition/src/util/execution.ts @@ -2,7 +2,6 @@ import {ForkName, ForkPostBellatrix, ForkPreGloas, ForkSeq} from "@lodestar/para import { BeaconBlock, BeaconBlockBody, - BlindedBeaconBlock, BlindedBeaconBlockBody, ExecutionPayload, ExecutionPayloadHeader, @@ -10,75 +9,16 @@ import { capella, deneb, isBlindedBeaconBlockBody, - isExecutionPayload, ssz, } from "@lodestar/types"; import { BeaconStateAllForks, - BeaconStateBellatrix, BeaconStateCapella, BeaconStateExecutions, CachedBeaconStateAllForks, CachedBeaconStateExecutions, } from "../types.js"; -/** - * Execution enabled = merge is done. - * When (A) state has execution data OR (B) block has execution data - */ -export function isExecutionEnabled(state: BeaconStateExecutions, block: BeaconBlock | BlindedBeaconBlock): boolean { - if (isMergeTransitionComplete(state)) { - return true; - } - - // Throws if not post-bellatrix block. A fork-guard in isExecutionEnabled() prevents this from happening - const payload = getFullOrBlindedPayload(block); - // Note: spec says to check all payload is zero-ed. However a state-root cannot be zero for any non-empty payload - // TODO: Consider comparing with the payload root if this assumption is not correct. - // return !byteArrayEquals(payload.stateRoot, ZERO_HASH); - - // UPDATE: stateRoot comparision should have been enough with zero hash, but spec tests were failing - // Revisit this later to fix specs and make this efficient - return isExecutionPayload(payload) - ? !ssz.bellatrix.ExecutionPayload.equals(payload, ssz.bellatrix.ExecutionPayload.defaultValue()) - : !ssz.bellatrix.ExecutionPayloadHeader.equals( - state.latestExecutionPayloadHeader, - // TODO: Performance - ssz.bellatrix.ExecutionPayloadHeader.defaultValue() - ); -} - -/** - * Merge block is the SINGLE block that transitions from POW to POS. 
- * state has no execution data AND this block has execution data - */ -export function isMergeTransitionBlock(state: BeaconStateExecutions, body: bellatrix.BeaconBlockBody): boolean { - return ( - !isMergeTransitionComplete(state) && - !ssz.bellatrix.ExecutionPayload.equals(body.executionPayload, ssz.bellatrix.ExecutionPayload.defaultValue()) - ); -} - -/** - * Merge is complete when the state includes execution layer data: - * state.latestExecutionPayloadHeader NOT EMPTY - */ -export function isMergeTransitionComplete(state: BeaconStateExecutions): boolean { - if (!isCapellaStateType(state)) { - return !ssz.bellatrix.ExecutionPayloadHeader.equals( - (state as BeaconStateBellatrix).latestExecutionPayloadHeader, - // TODO: Performance - ssz.bellatrix.ExecutionPayloadHeader.defaultValue() - ); - } - - return !ssz.capella.ExecutionPayloadHeader.equals( - state.latestExecutionPayloadHeader, - // TODO: Performance - ssz.capella.ExecutionPayloadHeader.defaultValue() - ); -} - /** Type guard for bellatrix.BeaconState */ export function isExecutionStateType(state: BeaconStateAllForks): state is BeaconStateExecutions { return (state as BeaconStateExecutions).latestExecutionPayloadHeader !== undefined; diff --git a/packages/types/src/bellatrix/types.ts b/packages/types/src/bellatrix/types.ts index dcd45d7c97..02ee66ed10 100644 --- a/packages/types/src/bellatrix/types.ts +++ b/packages/types/src/bellatrix/types.ts @@ -9,6 +9,7 @@ export type BeaconBlockBody = ValueOf; export type BeaconBlock = ValueOf; export type SignedBeaconBlock = ValueOf; export type BeaconState = ValueOf; +/** @deprecated */ export type PowBlock = ValueOf; export type BlindedBeaconBlockBody = ValueOf; diff --git a/packages/validator/src/util/params.ts b/packages/validator/src/util/params.ts index bd6cd2a535..0f3efaefcc 100644 --- a/packages/validator/src/util/params.ts +++ b/packages/validator/src/util/params.ts @@ -110,8 +110,8 @@ function getSpecCriticalParams(localConfig: ChainConfig): Record Date: Fri, 12 Dec 2025 23:29:41 +0700 Subject: [PATCH 04/20] chore: use pubkey2index from BeaconChain (#8691) **Motivation** - as a preparation for lodestar-z integration, we should not access pubkey2index from CachedBeaconState **Description** - use that from BeaconChain instead part of #8652 Co-authored-by: Tuyen Nguyen --- .../beacon-node/src/api/impl/beacon/state/index.ts | 9 ++++----- packages/beacon-node/src/api/impl/validator/index.ts | 2 +- packages/beacon-node/src/chain/chain.ts | 2 +- .../src/chain/rewards/attestationsRewards.ts | 11 ++++++----- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/beacon-node/src/api/impl/beacon/state/index.ts b/packages/beacon-node/src/api/impl/beacon/state/index.ts index b44248d0b1..ddbf9e7390 100644 --- a/packages/beacon-node/src/api/impl/beacon/state/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/state/index.ts @@ -95,7 +95,7 @@ export function getBeaconStateApi({ const {state, executionOptimistic, finalized} = await getState(stateId); const currentEpoch = getCurrentEpoch(state); const {validators, balances} = state; // Get the validators sub tree once for all the loop - const {pubkey2index} = chain.getHeadState().epochCtx; + const {pubkey2index} = chain; const validatorResponses: routes.beacon.ValidatorResponse[] = []; if (validatorIds.length) { @@ -154,7 +154,7 @@ export function getBeaconStateApi({ async postStateValidatorIdentities({stateId, validatorIds = []}) { const {state, executionOptimistic, finalized} = await getState(stateId); - const {pubkey2index} 
= chain.getHeadState().epochCtx; + const {pubkey2index} = chain; let validatorIdentities: routes.beacon.ValidatorIdentities; @@ -187,7 +187,7 @@ export function getBeaconStateApi({ async getStateValidator({stateId, validatorId}) { const {state, executionOptimistic, finalized} = await getState(stateId); - const {pubkey2index} = chain.getHeadState().epochCtx; + const {pubkey2index} = chain; const resp = getStateValidatorIndex(validatorId, state, pubkey2index); if (!resp.valid) { @@ -212,10 +212,9 @@ export function getBeaconStateApi({ if (validatorIds.length) { assertUniqueItems(validatorIds, "Duplicate validator IDs provided"); - const headState = chain.getHeadState(); const balances: routes.beacon.ValidatorBalance[] = []; for (const id of validatorIds) { - const resp = getStateValidatorIndex(id, state, headState.epochCtx.pubkey2index); + const resp = getStateValidatorIndex(id, state, chain.pubkey2index); if (resp.valid) { balances.push({ diff --git a/packages/beacon-node/src/api/impl/validator/index.ts b/packages/beacon-node/src/api/impl/validator/index.ts index 1547aa9d66..7a391ac0b0 100644 --- a/packages/beacon-node/src/api/impl/validator/index.ts +++ b/packages/beacon-node/src/api/impl/validator/index.ts @@ -1511,7 +1511,7 @@ export function getValidatorApi( const filteredRegistrations = registrations.filter((registration) => { const {pubkey} = registration.message; - const validatorIndex = headState.epochCtx.pubkey2index.get(pubkey); + const validatorIndex = chain.pubkey2index.get(pubkey); if (validatorIndex === null) return false; const validator = headState.validators.getReadonly(validatorIndex); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 1e3a017c1a..048405a7a0 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -1344,7 +1344,7 @@ export class BeaconChain implements IBeaconChain { throw Error(`State is not in cache for slot ${slot}`); } - const rewards = await computeAttestationsRewards(epoch, cachedState, this.config, validatorIds); + const rewards = await computeAttestationsRewards(this.pubkey2index, cachedState, validatorIds); return {rewards, executionOptimistic, finalized}; } diff --git a/packages/beacon-node/src/chain/rewards/attestationsRewards.ts b/packages/beacon-node/src/chain/rewards/attestationsRewards.ts index 0b1c4dbc36..36fc07c15b 100644 --- a/packages/beacon-node/src/chain/rewards/attestationsRewards.ts +++ b/packages/beacon-node/src/chain/rewards/attestationsRewards.ts @@ -1,5 +1,5 @@ +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {routes} from "@lodestar/api"; -import {BeaconConfig} from "@lodestar/config"; import { EFFECTIVE_BALANCE_INCREMENT, ForkName, @@ -25,7 +25,7 @@ import { hasMarkers, isInInactivityLeak, } from "@lodestar/state-transition"; -import {Epoch, ValidatorIndex} from "@lodestar/types"; +import {ValidatorIndex} from "@lodestar/types"; import {fromHex} from "@lodestar/utils"; export type AttestationsRewards = routes.beacon.AttestationsRewards; @@ -38,9 +38,8 @@ const defaultAttestationsReward = {head: 0, target: 0, source: 0, inclusionDelay const defaultAttestationsPenalty = {target: 0, source: 0}; export async function computeAttestationsRewards( - _epoch: Epoch, + pubkey2index: PubkeyIndexMap, state: CachedBeaconStateAllForks, - _config: BeaconConfig, validatorIds?: (ValidatorIndex | string)[] ): Promise { const fork = state.config.getForkName(state.slot); @@ -53,6 +52,7 @@ export async function 
computeAttestationsRewards( const [idealRewards, penalties] = computeIdealAttestationsRewardsAndPenaltiesAltair(stateAltair, transitionCache); const totalRewards = computeTotalAttestationsRewardsAltair( + pubkey2index, stateAltair, transitionCache, idealRewards, @@ -139,6 +139,7 @@ function computeIdealAttestationsRewardsAndPenaltiesAltair( // Same calculation as `getRewardsAndPenaltiesAltair` but returns the breakdown of rewards instead of aggregated function computeTotalAttestationsRewardsAltair( + pubkey2index: PubkeyIndexMap, state: CachedBeaconStateAltair, transitionCache: EpochTransitionCache, idealRewards: IdealAttestationsReward[], @@ -149,7 +150,7 @@ function computeTotalAttestationsRewardsAltair( const {flags} = transitionCache; const {epochCtx, config} = state; const validatorIndices = validatorIds - .map((id) => (typeof id === "number" ? id : epochCtx.pubkey2index.get(fromHex(id)))) + .map((id) => (typeof id === "number" ? id : pubkey2index.get(fromHex(id)))) .filter((index) => index !== undefined); // Validator indices to include in the result const inactivityPenaltyDenominator = config.INACTIVITY_SCORE_BIAS * INACTIVITY_PENALTY_QUOTIENT_ALTAIR; From d4a47659a5d3065b6a2a571b4e137bf6f33531d7 Mon Sep 17 00:00:00 2001 From: twoeths <10568965+twoeths@users.noreply.github.com> Date: Tue, 16 Dec 2025 20:47:13 +0700 Subject: [PATCH 05/20] feat: transfer pending gossipsub message msg data (#8689) **Motivation** - improve memory by transferring gossipsub message data from the network thread to the main thread - In snappy decompression in #8647 we had to do `Buffer.alloc()` instead of `Buffer.allocUnsafe()`. We don't have to feel bad about that because `Buffer.allocUnsafe()` does not work with this PR, and we don't waste any memory. **Description** - use the `transferList` param when posting messages from the network thread to the main thread. Part of #8629 **Testing** I've tested this on `feat2` for 3 days; the previous branch was #8671, so it's basically the current stable. I did not see a significant improvement, but there is some good data for different nodes: - no change on 1k or `novc` - on the hoodi `sas` node we have better memory on the main thread with the same mesh peers, and the same memory on the network thread [screenshot 2025-12-12 at 11 05 27] - on the mainnet `sas` node, we have better memory on the network thread, a little bit worse on the main thread [screenshot 2025-12-12 at 11 08 42] - but for this mainnet node, the most interesting metric is `forward msg avg peers`, where we're faster than the majority of them [screenshot 2025-12-12 at 11 11 00] --------- Co-authored-by: Tuyen Nguyen --- packages/beacon-node/src/network/core/events.ts | 2 +- packages/beacon-node/src/network/events.ts | 8 +++++++- packages/beacon-node/src/util/workerEvents.ts | 17 +++++++++-------- .../network/onWorker/dataSerialization.test.ts | 2 +- 4 files changed, 18 insertions(+), 11 deletions(-) diff --git a/packages/beacon-node/src/network/core/events.ts b/packages/beacon-node/src/network/core/events.ts index 31bb0cb5d8..d97341569a 100644 --- a/packages/beacon-node/src/network/core/events.ts +++ b/packages/beacon-node/src/network/core/events.ts @@ -2,7 +2,7 @@ import EventEmitter from "node:events"; import {ResponseIncoming, ResponseOutgoing} from "@lodestar/reqresp"; import {AsyncIterableEventBus, IteratorEvent, RequestEvent} from "../../util/asyncIterableToEvents.js"; import {StrictEventEmitterSingleArg} from "../../util/strictEvents.js"; -import {EventDirection} from "../../util/workerEvents.js"; +import {EventDirection} from "../events.js"; import 
{IncomingRequestArgs, OutgoingRequestArgs} from "../reqresp/types.js"; export enum ReqRespBridgeEvent { diff --git a/packages/beacon-node/src/network/events.ts b/packages/beacon-node/src/network/events.ts index a960ade5b8..20d34b9966 100644 --- a/packages/beacon-node/src/network/events.ts +++ b/packages/beacon-node/src/network/events.ts @@ -3,7 +3,6 @@ import {PeerId, TopicValidatorResult} from "@libp2p/interface"; import {CustodyIndex, Status} from "@lodestar/types"; import {PeerIdStr} from "../util/peerId.js"; import {StrictEventEmitterSingleArg} from "../util/strictEvents.js"; -import {EventDirection} from "../util/workerEvents.js"; import {PendingGossipsubMessage} from "./processor/types.js"; import {RequestTypedContainer} from "./reqresp/ReqRespBeaconNode.js"; @@ -38,6 +37,13 @@ export type NetworkEventData = { }; }; +export enum EventDirection { + workerToMain, + mainToWorker, + /** Event not emitted through worker boundary */ + none, +} + export const networkEventDirection: Record = { [NetworkEvent.peerConnected]: EventDirection.workerToMain, [NetworkEvent.peerDisconnected]: EventDirection.workerToMain, diff --git a/packages/beacon-node/src/util/workerEvents.ts b/packages/beacon-node/src/util/workerEvents.ts index 807bf7a306..24941bd316 100644 --- a/packages/beacon-node/src/util/workerEvents.ts +++ b/packages/beacon-node/src/util/workerEvents.ts @@ -1,9 +1,11 @@ import {MessagePort, Worker} from "node:worker_threads"; +import {Message} from "@libp2p/interface"; import {Thread} from "@chainsafe/threads"; import {Logger} from "@lodestar/logger"; import {sleep} from "@lodestar/utils"; import {Metrics} from "../metrics/metrics.js"; import {NetworkCoreWorkerMetrics} from "../network/core/metrics.js"; +import {EventDirection, NetworkEvent} from "../network/events.js"; import {StrictEventEmitterSingleArg} from "./strictEvents.js"; const NANO_TO_SECOND_CONVERSION = 1e9; @@ -15,13 +17,6 @@ export type WorkerBridgeEvent = { data: EventData[keyof EventData]; }; -export enum EventDirection { - workerToMain, - mainToWorker, - /** Event not emitted through worker boundary */ - none, -} - /** * Bridges events from worker to main thread * Each event can only have one direction: @@ -63,7 +58,13 @@ export function wireEventsOnWorkerThread( posted: process.hrtime(), data, }; - parentPort.postMessage(workerEvent); + let transferList: ArrayBuffer[] | undefined = undefined; + if (eventName === NetworkEvent.pendingGossipsubMessage) { + const payload = data as {msg: Message}; + // Transfer the underlying ArrayBuffer to avoid copy for PendingGossipsubMessage + transferList = [payload.msg.data.buffer as ArrayBuffer]; + } + parentPort.postMessage(workerEvent, transferList); }); } } diff --git a/packages/beacon-node/test/e2e/network/onWorker/dataSerialization.test.ts b/packages/beacon-node/test/e2e/network/onWorker/dataSerialization.test.ts index 0411c70a5a..35fdc57f4d 100644 --- a/packages/beacon-node/test/e2e/network/onWorker/dataSerialization.test.ts +++ b/packages/beacon-node/test/e2e/network/onWorker/dataSerialization.test.ts @@ -9,6 +9,7 @@ import {ZERO_HASH, ZERO_HASH_HEX} from "../../../../src/constants/constants.js"; import {ReqRespBridgeEvent, ReqRespBridgeEventData} from "../../../../src/network/core/events.js"; import {NetworkWorkerApi} from "../../../../src/network/core/index.js"; import { + EventDirection, GossipType, NetworkEvent, NetworkEventData, @@ -18,7 +19,6 @@ import { } from "../../../../src/network/index.js"; import {CommitteeSubscription} from 
"../../../../src/network/subnets/interface.js"; import {IteratorEventType} from "../../../../src/util/asyncIterableToEvents.js"; -import {EventDirection} from "../../../../src/util/workerEvents.js"; import {getValidPeerId, validPeerIdStr} from "../../../utils/peer.js"; import {EchoWorker, getEchoWorker} from "./workerEchoHandler.js"; From b37f2bd1bd977390d0f089132b7d6307cfa54683 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Dec 2025 00:16:03 +0000 Subject: [PATCH 06/20] chore(deps): bump systeminformation from 5.23.8 to 5.27.14 (#8701) --- yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/yarn.lock b/yarn.lock index 2b7def47f2..640fffc476 100644 --- a/yarn.lock +++ b/yarn.lock @@ -10717,9 +10717,9 @@ symbol-tree@^3.2.4: integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== systeminformation@^5.22.9: - version "5.23.8" - resolved "https://registry.yarnpkg.com/systeminformation/-/systeminformation-5.23.8.tgz#b8efa73b36221cbcb432e3fe83dc1878a43f986a" - integrity sha512-Osd24mNKe6jr/YoXLLK3k8TMdzaxDffhpCxgkfgBHcapykIkd50HXThM3TCEuHO2pPuCsSx2ms/SunqhU5MmsQ== + version "5.27.14" + resolved "https://registry.yarnpkg.com/systeminformation/-/systeminformation-5.27.14.tgz#9f2b181521c151dad4972d47936ebb49a3271e9d" + integrity sha512-3DoNDYSZBLxBwaJtQGWNpq0fonga/VZ47HY1+7/G3YoIPaPz93Df6egSzzTKbEMmlzUpy3eQ0nR9REuYIycXGg== tar@^6.1.11, tar@^6.1.2: version "6.2.1" From aceb5b74164d9bf07b524680c8a152301cfb79a4 Mon Sep 17 00:00:00 2001 From: Nico Flaig Date: Wed, 17 Dec 2025 09:45:02 +0100 Subject: [PATCH 07/20] chore: remove eth1 related code (#8692) **Motivation** All networks are post-electra now and transition period is completed, which means due to [EIP-6110](https://eips.ethereum.org/EIPS/eip-6110) we no longer need to process deposits via eth1 bridge as those are now processed by the execution layer. This code is effectively tech debt, no longer exercised and just gets in the way when doing refactors. 
**Description** Removes all code related to eth1 bridge mechanism to include new deposits - removed all eth1 related code, we can no longer produce blocks with deposits pre-electra (syncing blocks still works) - building a genesis state from eth1 is no longer supported (only for testing) - removed various db repositories related to deposits/eth1 data - removed various `lodestar_eth1_*` metrics and dashboard panels - deprecated all `--eth1.*` flags (but kept for backward compatibility) - moved shared utility functions from eth1 to execution engine module Closes https://github.com/ChainSafe/lodestar/issues/7682 Closes https://github.com/ChainSafe/lodestar/issues/8654 --- biome.jsonc | 1 - dashboards/lodestar_block_production.json | 26 - dashboards/lodestar_execution_engine.json | 849 ------------------ dashboards/lodestar_summary.json | 12 - packages/beacon-node/package.json | 6 - .../src/api/impl/beacon/blocks/utils.ts | 2 +- packages/beacon-node/src/chain/chain.ts | 16 +- .../beacon-node/src/chain/genesis/genesis.ts | 190 ---- .../src/chain/genesis/interface.ts | 14 - packages/beacon-node/src/chain/initState.ts | 98 +- packages/beacon-node/src/chain/interface.ts | 2 - .../beacon-node/src/chain/prepareNextSlot.ts | 29 +- .../chain/produceBlock/produceBlockBody.ts | 16 +- packages/beacon-node/src/db/beacon.ts | 15 - packages/beacon-node/src/db/buckets.ts | 13 +- packages/beacon-node/src/db/interface.ts | 13 - .../src/db/repositories/depositDataRoot.ts | 80 -- .../src/db/repositories/depositEvent.ts | 32 - .../src/db/repositories/eth1Data.ts | 33 - .../beacon-node/src/db/repositories/index.ts | 3 - packages/beacon-node/src/db/single/index.ts | 2 - .../src/db/single/preGenesisState.ts | 37 - .../preGenesisStateLastProcessedBlock.ts | 34 - packages/beacon-node/src/eth1/errors.ts | 40 - .../beacon-node/src/eth1/eth1DataCache.ts | 26 - .../src/eth1/eth1DepositDataTracker.ts | 410 --------- .../beacon-node/src/eth1/eth1DepositsCache.ts | 141 --- packages/beacon-node/src/eth1/index.ts | 94 -- packages/beacon-node/src/eth1/interface.ts | 87 -- packages/beacon-node/src/eth1/options.ts | 28 - .../src/eth1/provider/eth1Provider.ts | 229 ----- .../beacon-node/src/eth1/provider/utils.ts | 136 --- packages/beacon-node/src/eth1/stream.ts | 75 -- .../src/eth1/utils/depositContract.ts | 37 - .../beacon-node/src/eth1/utils/deposits.ts | 70 -- .../beacon-node/src/eth1/utils/eth1Data.ts | 100 --- .../src/eth1/utils/eth1DepositEvent.ts | 12 - .../beacon-node/src/eth1/utils/eth1Vote.ts | 142 --- .../eth1/utils/groupDepositEventsByBlock.ts | 19 - .../utils/optimizeNextBlockDiffForGenesis.ts | 18 - .../beacon-node/src/execution/engine/http.ts | 17 +- .../beacon-node/src/execution/engine/index.ts | 2 +- .../src/execution/engine/interface.ts | 2 +- .../engine}/jsonRpcHttpClient.ts | 2 +- .../provider => execution/engine}/jwt.ts | 0 .../beacon-node/src/execution/engine/mock.ts | 3 +- .../src/execution/engine/payloadIdCache.ts | 2 +- .../beacon-node/src/execution/engine/types.ts | 18 +- .../beacon-node/src/execution/engine/utils.ts | 116 ++- packages/beacon-node/src/index.ts | 3 +- .../src/metrics/metrics/lodestar.ts | 92 -- packages/beacon-node/src/node/nodejs.ts | 9 - packages/beacon-node/src/node/options.ts | 3 - .../src/node/utils/interop/deposits.ts | 4 +- .../src/node/utils/interop/state.ts | 2 +- packages/beacon-node/src/node/utils/state.ts | 21 +- .../e2e/eth1/eth1ForBlockProduction.test.ts | 118 --- .../test/e2e/eth1/eth1Provider.test.ts | 99 -- .../beacon-node/test/e2e/eth1/stream.test.ts | 70 -- 
.../engine}/jsonRpcHttpClient.test.ts | 38 +- .../test/e2e/interop/genesisState.test.ts | 2 +- .../test/mocks/mockedBeaconChain.ts | 5 - .../beacon-node/test/mocks/mockedBeaconDb.ts | 11 - .../produceBlock/produceBlockBody.test.ts | 2 - .../perf/chain/verifyImportBlocks.test.ts | 2 - .../test/perf/eth1/pickEth1Vote.test.ts | 98 -- .../test/sim/electra-interop.test.ts | 5 +- .../test/spec/presets/fork_choice.test.ts | 3 - .../chain/genesis/genesis.test.ts | 120 --- .../api/impl/validator/produceBlockV3.test.ts | 4 - .../test/unit/db/api/repository.test.ts | 2 +- .../unit/eth1/eth1DepositDataTracker.test.ts | 91 -- .../unit/eth1/utils/depositContract.test.ts | 10 - .../test/unit/eth1/utils/deposits.test.ts | 208 ----- .../test/unit/eth1/utils/eth1Data.test.ts | 275 ------ .../unit/eth1/utils/eth1DepositEvent.test.ts | 46 - .../test/unit/eth1/utils/eth1Vote.test.ts | 171 ---- .../utils/groupDepositEventsByBlock.test.ts | 34 - .../optimizeNextBlockDiffForGenesis.test.ts | 55 -- .../engine}/hexEncoding.test.ts | 4 +- .../{eth1 => execution/engine}/jwt.test.ts | 4 +- .../test/unit/execution/engine/utils.test.ts | 2 +- .../test/unit/executionEngine/http.test.ts | 3 +- .../unit/executionEngine/httpRetry.test.ts | 2 +- .../test/utils/networkWithMockDb.ts | 2 - .../beacon-node/test/utils/node/beacon.ts | 10 +- packages/beacon-node/test/utils/runEl.ts | 20 +- packages/beacon-node/test/utils/testnet.ts | 51 -- packages/cli/src/cmds/beacon/handler.ts | 14 +- .../cli/src/cmds/beacon/initBeaconState.ts | 11 +- packages/cli/src/cmds/beacon/options.ts | 2 +- packages/cli/src/cmds/dev/files.ts | 2 +- packages/cli/src/cmds/dev/handler.ts | 2 +- packages/cli/src/cmds/dev/options.ts | 5 - packages/cli/src/networks/chiado.ts | 2 - packages/cli/src/networks/dev.ts | 1 - packages/cli/src/networks/ephemery.ts | 1 - packages/cli/src/networks/gnosis.ts | 2 - packages/cli/src/networks/hoodi.ts | 1 - packages/cli/src/networks/index.ts | 1 - packages/cli/src/networks/mainnet.ts | 1 - packages/cli/src/networks/sepolia.ts | 1 - .../cli/src/options/beaconNodeOptions/eth1.ts | 57 +- .../src/options/beaconNodeOptions/index.ts | 1 - packages/cli/test/unit/cmds/beacon.test.ts | 9 - .../unit/config/beaconNodeOptions.test.ts | 4 +- .../unit/options/beaconNodeOptions.test.ts | 44 - .../utils/crucible/clients/beacon/lodestar.ts | 2 - .../cli/test/utils/crucible/simulation.ts | 2 +- .../state-transition/src/cache/epochCache.ts | 3 - .../validator/src/services/validatorStore.ts | 16 +- scripts/dev/node1.sh | 1 - scripts/dev/node2.sh | 1 - 113 files changed, 238 insertions(+), 4998 deletions(-) delete mode 100644 packages/beacon-node/src/chain/genesis/genesis.ts delete mode 100644 packages/beacon-node/src/chain/genesis/interface.ts delete mode 100644 packages/beacon-node/src/db/repositories/depositDataRoot.ts delete mode 100644 packages/beacon-node/src/db/repositories/depositEvent.ts delete mode 100644 packages/beacon-node/src/db/repositories/eth1Data.ts delete mode 100644 packages/beacon-node/src/db/single/index.ts delete mode 100644 packages/beacon-node/src/db/single/preGenesisState.ts delete mode 100644 packages/beacon-node/src/db/single/preGenesisStateLastProcessedBlock.ts delete mode 100644 packages/beacon-node/src/eth1/errors.ts delete mode 100644 packages/beacon-node/src/eth1/eth1DataCache.ts delete mode 100644 packages/beacon-node/src/eth1/eth1DepositDataTracker.ts delete mode 100644 packages/beacon-node/src/eth1/eth1DepositsCache.ts delete mode 100644 packages/beacon-node/src/eth1/index.ts delete mode 100644 
packages/beacon-node/src/eth1/interface.ts delete mode 100644 packages/beacon-node/src/eth1/options.ts delete mode 100644 packages/beacon-node/src/eth1/provider/eth1Provider.ts delete mode 100644 packages/beacon-node/src/eth1/provider/utils.ts delete mode 100644 packages/beacon-node/src/eth1/stream.ts delete mode 100644 packages/beacon-node/src/eth1/utils/depositContract.ts delete mode 100644 packages/beacon-node/src/eth1/utils/deposits.ts delete mode 100644 packages/beacon-node/src/eth1/utils/eth1Data.ts delete mode 100644 packages/beacon-node/src/eth1/utils/eth1DepositEvent.ts delete mode 100644 packages/beacon-node/src/eth1/utils/eth1Vote.ts delete mode 100644 packages/beacon-node/src/eth1/utils/groupDepositEventsByBlock.ts delete mode 100644 packages/beacon-node/src/eth1/utils/optimizeNextBlockDiffForGenesis.ts rename packages/beacon-node/src/{eth1/provider => execution/engine}/jsonRpcHttpClient.ts (99%) rename packages/beacon-node/src/{eth1/provider => execution/engine}/jwt.ts (100%) delete mode 100644 packages/beacon-node/test/e2e/eth1/eth1ForBlockProduction.test.ts delete mode 100644 packages/beacon-node/test/e2e/eth1/eth1Provider.test.ts delete mode 100644 packages/beacon-node/test/e2e/eth1/stream.test.ts rename packages/beacon-node/test/e2e/{eth1 => execution/engine}/jsonRpcHttpClient.test.ts (85%) delete mode 100644 packages/beacon-node/test/perf/eth1/pickEth1Vote.test.ts delete mode 100644 packages/beacon-node/test/unit-minimal/chain/genesis/genesis.test.ts delete mode 100644 packages/beacon-node/test/unit/eth1/eth1DepositDataTracker.test.ts delete mode 100644 packages/beacon-node/test/unit/eth1/utils/depositContract.test.ts delete mode 100644 packages/beacon-node/test/unit/eth1/utils/deposits.test.ts delete mode 100644 packages/beacon-node/test/unit/eth1/utils/eth1Data.test.ts delete mode 100644 packages/beacon-node/test/unit/eth1/utils/eth1DepositEvent.test.ts delete mode 100644 packages/beacon-node/test/unit/eth1/utils/eth1Vote.test.ts delete mode 100644 packages/beacon-node/test/unit/eth1/utils/groupDepositEventsByBlock.test.ts delete mode 100644 packages/beacon-node/test/unit/eth1/utils/optimizeNextBlockDiffForGenesis.test.ts rename packages/beacon-node/test/unit/{eth1 => execution/engine}/hexEncoding.test.ts (95%) rename packages/beacon-node/test/unit/{eth1 => execution/engine}/jwt.test.ts (94%) delete mode 100644 packages/beacon-node/test/utils/testnet.ts diff --git a/biome.jsonc b/biome.jsonc index 5ea8048972..f58cdb4888 100644 --- a/biome.jsonc +++ b/biome.jsonc @@ -273,7 +273,6 @@ "**/packages/beacon-node/src/db/buckets.ts", "**/packages/beacon-node/src/execution/engine/mock.ts", "**/packages/beacon-node/src/execution/engine/types.ts", - "**/packages/beacon-node/src/eth1/provider/eth1Provider.ts", "**/packages/validator/src/buckets.ts", "**/packages/prover/src/types.ts", "**/prover/src/utils/process.ts", diff --git a/dashboards/lodestar_block_production.json b/dashboards/lodestar_block_production.json index cb0af18080..93f819bc3e 100644 --- a/dashboards/lodestar_block_production.json +++ b/dashboards/lodestar_block_production.json @@ -212,19 +212,6 @@ "range": true, "refId": "attestations" }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "editorMode": "code", - "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"eth1DataAndDeposits\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"eth1DataAndDeposits\"}[$rate_interval])", - "hide": false, - "instant": false, - 
"legendFormat": "{{step}}", - "range": true, - "refId": "eth1DataAndDeposits" - }, { "datasource": { "type": "prometheus", @@ -388,19 +375,6 @@ "range": true, "refId": "attestations" }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "editorMode": "code", - "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"eth1DataAndDeposits\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"eth1DataAndDeposits\"}[$rate_interval])", - "hide": false, - "instant": false, - "legendFormat": "{{step}}", - "range": true, - "refId": "eth1DataAndDeposits" - }, { "datasource": { "type": "prometheus", diff --git a/dashboards/lodestar_execution_engine.json b/dashboards/lodestar_execution_engine.json index 434563d54e..628d5bb4fb 100644 --- a/dashboards/lodestar_execution_engine.json +++ b/dashboards/lodestar_execution_engine.json @@ -1608,855 +1608,6 @@ ], "title": "forkchoiceUpdatedV2", "type": "timeseries" - }, - { - "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 67 - }, - "id": 380, - "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "refId": "A" - } - ], - "title": "Eth1 Stats", - "type": "row" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "opacity", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "links": [], - "mappings": [], - "unit": "short" - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 68 - }, - "id": 429, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "8.2.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_remote_highest_block", - "hide": false, - "interval": "", - "legendFormat": "remote_highest_block", - "refId": "A" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_last_processed_deposit_block_number", - "hide": false, - "interval": "", - "legendFormat": "last_processed_deposit_block", - "refId": "D" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_last_fetched_block_block_number", - "hide": false, - "interval": "", - "legendFormat": "last_fetched_block_block_number", - "refId": "B" - } - ], - "title": "Eth1 Block Details", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [ - { - "options": { - "0": { - "index": 0, - "text": "False" - }, - "1": { - "index": 1, - 
"text": "True" - } - }, - "type": "value" - } - ], - "unit": "none" - }, - "overrides": [] - }, - "gridPos": { - "h": 4, - "w": 3, - "x": 12, - "y": 68 - }, - "id": 426, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "showPercentChange": false, - "text": {}, - "textMode": "value", - "wideLayout": true - }, - "pluginVersion": "10.4.1", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_deposit_tracker_is_caughtup", - "interval": "", - "legendFormat": "", - "refId": "A" - } - ], - "title": "Up to date", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [] - }, - "overrides": [] - }, - "gridPos": { - "h": 4, - "w": 3, - "x": 15, - "y": 68 - }, - "id": 427, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "showPercentChange": false, - "text": {}, - "textMode": "auto", - "wideLayout": true - }, - "pluginVersion": "10.4.1", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_http_client_config_urls_count", - "hide": false, - "interval": "", - "legendFormat": "", - "refId": "A" - } - ], - "title": "Urls", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [], - "unit": "s" - }, - "overrides": [] - }, - "gridPos": { - "h": 4, - "w": 3, - "x": 18, - "y": 68 - }, - "id": 411, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "showPercentChange": false, - "text": {}, - "textMode": "auto", - "wideLayout": true - }, - "pluginVersion": "10.4.1", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_follow_distance_seconds_config", - "hide": false, - "interval": "", - "legendFormat": "", - "refId": "A" - } - ], - "title": "Follow Config", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "mappings": [], - "unit": "dateTimeFromNow" - }, - "overrides": [] - }, - "gridPos": { - "h": 4, - "w": 3, - "x": 21, - "y": 68 - }, - "id": 431, - "options": { - "colorMode": "value", - "graphMode": "area", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "/^Time$/", - "values": false - }, - "showPercentChange": false, - "text": {}, - "textMode": "auto", - "wideLayout": true - }, - "pluginVersion": "10.4.1", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_last_fetched_block_timestamp", - "format": "time_series", - "hide": false, - "instant": false, - "interval": "", - "legendFormat": "eth1_last_fetched_block_timestamp", - "refId": "A" - } - ], - "title": "Last fetched", - "type": "stat" - }, - { - "datasource": { - "type": "prometheus", - "uid": 
"${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "opacity", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "links": [], - "mappings": [], - "unit": "short" - }, - "overrides": [] - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 72 - }, - "id": 474, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "8.2.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_follow_distance_dynamic", - "hide": false, - "interval": "", - "legendFormat": "eth1_follow_distance_dynamic", - "refId": "A" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_blocks_batch_size_dynamic", - "hide": false, - "interval": "", - "legendFormat": "eth1_blocks_batch_size_dynamic", - "refId": "B" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_logs_batch_size_dynamic", - "hide": false, - "interval": "", - "legendFormat": "eth1_logs_batch_size_dynamic", - "refId": "C" - } - ], - "title": "Eth1 Dynamic Stats", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "opacity", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 1, - "scaleDistribution": { - "log": 2, - "type": "log" - }, - "showPoints": "always", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "unit": "s" - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 76 - }, - "id": 384, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "multi", - "sort": "none" - } - }, - "pluginVersion": "8.4.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "rate(lodestar_eth1_http_client_request_time_seconds_sum[32m])/rate(lodestar_eth1_http_client_request_time_seconds_count[32m])", - "format": "time_series", - "hide": false, - "interval": "", - "legendFormat": "{{routeId}}", - "refId": "A" - } - ], - "title": "Average response times", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - 
"fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "opacity", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "links": [], - "mappings": [], - "unit": "short" - }, - "overrides": [] - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 78 - }, - "id": 413, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "8.2.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "lodestar_eth1_http_client_request_errors_total", - "interval": "", - "legendFormat": "{{routeId}} request_errors", - "refId": "A" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "rate(lodestar_eth1_deposit_tracker_update_errors_total[$rate_interval])", - "hide": false, - "interval": "", - "legendFormat": "eth1_deposit_tracker_update_errors_total", - "refId": "B" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "rate(lodestar_eth1_http_client_request_used_fallback_url_total[$rate_interval])", - "hide": false, - "interval": "", - "legendFormat": "{{routeId}} used_fallback_url", - "refId": "C" - } - ], - "title": "Error rates", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 7, - "gradientMode": "opacity", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 1, - "scaleDistribution": { - "log": 2, - "type": "log" - }, - "showPoints": "always", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "unit": "none" - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 84 - }, - "id": 434, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "multi", - "sort": "none" - } - }, - "pluginVersion": "8.4.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "12*rate(lodestar_eth1_http_client_request_time_seconds_count[32m])", - "interval": "", - "legendFormat": "{{routeId}}", - "refId": "A" - } - ], - "title": "Requests / slot", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": 
"palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "opacity", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "links": [], - "mappings": [], - "unit": "short" - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 12, - "y": 84 - }, - "id": 428, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "8.2.2", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "rate(lodestar_eth1_blocks_fetched_total[32m])", - "hide": false, - "interval": "", - "legendFormat": "blocks", - "refId": "A" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "rate(lodestar_eth1_deposit_events_fetched_total[32m])", - "hide": false, - "interval": "", - "legendFormat": "deposits", - "refId": "B" - } - ], - "title": "Eth1 fetch rate", - "type": "timeseries" } ], "refresh": "10s", diff --git a/dashboards/lodestar_summary.json b/dashboards/lodestar_summary.json index 6fbb00bac0..c978ebd76e 100644 --- a/dashboards/lodestar_summary.json +++ b/dashboards/lodestar_summary.json @@ -1825,18 +1825,6 @@ "range": true, "refId": "A" }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "exemplar": false, - "expr": "rate(lodestar_eth1_http_client_request_time_seconds_sum{routeId=\"getBlockNumber\"}[$rate_interval])\n/\nrate(lodestar_eth1_http_client_request_time_seconds_count{routeId=\"getBlockNumber\"}[$rate_interval])", - "hide": false, - "interval": "", - "legendFormat": "eth1_getBlockNumber_roundtrip", - "refId": "B" - }, { "datasource": { "type": "prometheus", diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index cf37aa989e..c90505e336 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -39,11 +39,6 @@ "types": "./lib/db/index.d.ts", "import": "./lib/db/index.js" }, - "./eth1": { - "bun": "./src/eth1/index.ts", - "types": "./lib/eth1/index.d.ts", - "import": "./lib/eth1/index.js" - }, "./metrics": { "bun": "./src/metrics/index.ts", "types": "./lib/metrics/index.d.ts", @@ -126,7 +121,6 @@ "@chainsafe/ssz": "^1.2.2", "@chainsafe/threads": "^1.11.3", "@crate-crypto/node-eth-kzg": "0.9.1", - "@ethersproject/abi": "^5.7.0", "@fastify/bearer-auth": "^10.0.1", "@fastify/cors": "^10.0.1", "@fastify/swagger": "^9.0.0", diff --git a/packages/beacon-node/src/api/impl/beacon/blocks/utils.ts b/packages/beacon-node/src/api/impl/beacon/blocks/utils.ts index 5ca1d02197..b27979dc41 100644 --- a/packages/beacon-node/src/api/impl/beacon/blocks/utils.ts +++ b/packages/beacon-node/src/api/impl/beacon/blocks/utils.ts @@ -5,7 +5,7 @@ import {blockToHeader} from "@lodestar/state-transition"; import {RootHex, SignedBeaconBlock, Slot} from "@lodestar/types"; import {IBeaconChain} from "../../../../chain/interface.js"; import {GENESIS_SLOT} from 
"../../../../constants/index.js"; -import {rootHexRegex} from "../../../../eth1/provider/utils.js"; +import {rootHexRegex} from "../../../../execution/engine/utils.js"; import {ApiError, ValidationError} from "../../errors.js"; export function toBeaconHeaderResponse( diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 048405a7a0..e35e1f1406 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -43,7 +43,6 @@ import {Logger, fromHex, gweiToWei, isErrorAborted, pruneSetToMax, sleep, toRoot import {ProcessShutdownCallback} from "@lodestar/validator"; import {GENESIS_EPOCH, ZERO_HASH} from "../constants/index.js"; import {IBeaconDb} from "../db/index.js"; -import {IEth1ForBlockProduction} from "../eth1/index.js"; import {BuilderStatus} from "../execution/builder/http.js"; import {IExecutionBuilder, IExecutionEngine} from "../execution/index.js"; import {Metrics} from "../metrics/index.js"; @@ -117,7 +116,6 @@ const DEFAULT_MAX_CACHED_PRODUCED_RESULTS = 4; export class BeaconChain implements IBeaconChain { readonly genesisTime: UintNum64; readonly genesisValidatorsRoot: Root; - readonly eth1: IEth1ForBlockProduction; readonly executionEngine: IExecutionEngine; readonly executionBuilder?: IExecutionBuilder; // Expose config for convenience in modularized functions @@ -216,7 +214,6 @@ export class BeaconChain implements IBeaconChain { validatorMonitor, anchorState, isAnchorStateFinalized, - eth1, executionEngine, executionBuilder, }: { @@ -233,7 +230,6 @@ export class BeaconChain implements IBeaconChain { validatorMonitor: ValidatorMonitor | null; anchorState: BeaconStateAllForks; isAnchorStateFinalized: boolean; - eth1: IEth1ForBlockProduction; executionEngine: IExecutionEngine; executionBuilder?: IExecutionBuilder; } @@ -248,7 +244,6 @@ export class BeaconChain implements IBeaconChain { this.genesisTime = anchorState.genesisTime; this.anchorStateLatestBlockSlot = anchorState.latestBlockHeader.slot; this.genesisValidatorsRoot = anchorState.genesisValidatorsRoot; - this.eth1 = eth1; this.executionEngine = executionEngine; this.executionBuilder = executionBuilder; const signal = this.abortController.signal; @@ -294,7 +289,7 @@ export class BeaconChain implements IBeaconChain { // Restore state caches // anchorState may already by a CachedBeaconState. If so, don't create the cache again, since deserializing all // pubkeys takes ~30 seconds for 350k keys (mainnet 2022Q2). - // When the BeaconStateCache is created in eth1 genesis builder it may be incorrect. Until we can ensure that + // When the BeaconStateCache is created in initializeBeaconStateFromEth1 it may be incorrect. Until we can ensure that // it's safe to re-use _ANY_ BeaconStateCache, this option is disabled by default and only used in tests. 
const cachedState = isCachedBeaconState(anchorState) && opts.skipCreateStateCacheIfAvailable @@ -417,15 +412,6 @@ export class BeaconChain implements IBeaconChain { signal ); - // Stop polling eth1 data if anchor state is in Electra AND deposit_requests_start_index is reached - const anchorStateFork = this.config.getForkName(anchorState.slot); - if (isForkPostElectra(anchorStateFork)) { - const {eth1DepositIndex, depositRequestsStartIndex} = anchorState as BeaconStateElectra; - if (eth1DepositIndex === Number(depositRequestsStartIndex)) { - this.eth1.stopPollingEth1Data(); - } - } - // always run PrepareNextSlotScheduler except for fork_choice spec tests if (!opts?.disablePrepareNextSlot) { new PrepareNextSlotScheduler(this, this.config, metrics, this.logger, signal); diff --git a/packages/beacon-node/src/chain/genesis/genesis.ts b/packages/beacon-node/src/chain/genesis/genesis.ts deleted file mode 100644 index f56b42a3b8..0000000000 --- a/packages/beacon-node/src/chain/genesis/genesis.ts +++ /dev/null @@ -1,190 +0,0 @@ -import {Tree, toGindex} from "@chainsafe/persistent-merkle-tree"; -import {BeaconConfig, ChainForkConfig} from "@lodestar/config"; -import {GENESIS_EPOCH, GENESIS_SLOT} from "@lodestar/params"; -import { - BeaconStateAllForks, - CachedBeaconStateAllForks, - applyDeposits, - applyEth1BlockHash, - applyTimestamp, - createCachedBeaconState, - createEmptyEpochCacheImmutableData, - getActiveValidatorIndices, - getGenesisBeaconState, - getTemporaryBlockHeader, -} from "@lodestar/state-transition"; -import {phase0, ssz} from "@lodestar/types"; -import {Logger} from "@lodestar/utils"; -import {DepositTree} from "../../db/repositories/depositDataRoot.js"; -import {IEth1Provider} from "../../eth1/index.js"; -import {IEth1StreamParams} from "../../eth1/interface.js"; -import {getDepositsAndBlockStreamForGenesis, getDepositsStream} from "../../eth1/stream.js"; -import {GenesisResult, IGenesisBuilder} from "./interface.js"; - -export type GenesisBuilderKwargs = { - config: ChainForkConfig; - eth1Provider: IEth1Provider; - logger: Logger; - - /** Use to restore pending progress */ - pendingStatus?: { - state: BeaconStateAllForks; - depositTree: DepositTree; - lastProcessedBlockNumber: number; - }; - - signal?: AbortSignal; - maxBlocksPerPoll?: number; -}; - -export class GenesisBuilder implements IGenesisBuilder { - // Expose state to persist on error - readonly state: CachedBeaconStateAllForks; - readonly depositTree: DepositTree; - /** Is null if no block has been processed yet */ - lastProcessedBlockNumber: number | null = null; - - private readonly config: BeaconConfig; - private readonly eth1Provider: IEth1Provider; - private readonly logger: Logger; - private readonly signal?: AbortSignal; - private readonly eth1Params: IEth1StreamParams; - private readonly depositCache = new Set(); - private readonly fromBlock: number; - private readonly logEvery = 30 * 1000; - private lastLog = 0; - /** Current count of active validators in the state */ - private activatedValidatorCount: number; - - constructor({config, eth1Provider, logger, signal, pendingStatus, maxBlocksPerPoll}: GenesisBuilderKwargs) { - // at genesis builder, there is no genesis validator so we don't have a real BeaconConfig - // but we need BeaconConfig to temporarily create CachedBeaconState, the cast here is safe since we don't use any getDomain here - // the use of state as CachedBeaconState is just for convenient, GenesisResult returns TreeView anyway - this.eth1Provider = eth1Provider; - this.logger = logger; - 
this.signal = signal; - this.eth1Params = { - ...config, - maxBlocksPerPoll: maxBlocksPerPoll ?? 10000, - }; - - let stateView: BeaconStateAllForks; - - if (pendingStatus) { - this.logger.info("Restoring pending genesis state", {block: pendingStatus.lastProcessedBlockNumber}); - stateView = pendingStatus.state; - this.depositTree = pendingStatus.depositTree; - this.fromBlock = Math.max(pendingStatus.lastProcessedBlockNumber + 1, this.eth1Provider.deployBlock); - } else { - stateView = getGenesisBeaconState( - config, - ssz.phase0.Eth1Data.defaultValue(), - getTemporaryBlockHeader(config, config.getForkTypes(GENESIS_SLOT).BeaconBlock.defaultValue()) - ); - this.depositTree = ssz.phase0.DepositDataRootList.defaultViewDU(); - this.fromBlock = this.eth1Provider.deployBlock; - } - - // TODO - PENDING: Ensure EpochCacheImmutableData is created only once - this.state = createCachedBeaconState(stateView, createEmptyEpochCacheImmutableData(config, stateView)); - this.config = this.state.config; - this.activatedValidatorCount = getActiveValidatorIndices(stateView, GENESIS_EPOCH).length; - } - - /** - * Get eth1 deposit events and blocks and apply to this.state until we found genesis. - */ - async waitForGenesis(): Promise { - await this.eth1Provider.validateContract(); - - // Load data from data from this.db.depositData, this.db.depositDataRoot - // And start from a more recent fromBlock - const blockNumberValidatorGenesis = await this.waitForGenesisValidators(); - - const depositsAndBlocksStream = getDepositsAndBlockStreamForGenesis( - blockNumberValidatorGenesis, - this.eth1Provider, - this.eth1Params, - this.signal - ); - - for await (const [depositEvents, block] of depositsAndBlocksStream) { - this.applyDeposits(depositEvents); - applyTimestamp(this.config, this.state, block.timestamp); - applyEth1BlockHash(this.state, block.blockHash); - this.lastProcessedBlockNumber = block.blockNumber; - - if ( - this.state.genesisTime >= this.config.MIN_GENESIS_TIME && - this.activatedValidatorCount >= this.config.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT - ) { - this.logger.info("Found genesis state", {blockNumber: block.blockNumber}); - return { - state: this.state, - depositTree: this.depositTree, - block, - }; - } - - this.throttledLog(`Waiting for min genesis time ${block.timestamp} / ${this.config.MIN_GENESIS_TIME}`); - } - - throw Error("depositsStream stopped without a valid genesis state"); - } - - /** - * First phase of waiting for genesis. 
- * Stream deposits events in batches as big as possible without querying block data - * @returns Block number at which there are enough active validators is state for genesis - */ - private async waitForGenesisValidators(): Promise { - const depositsStream = getDepositsStream(this.fromBlock, this.eth1Provider, this.eth1Params, this.signal); - - for await (const {depositEvents, blockNumber} of depositsStream) { - this.applyDeposits(depositEvents); - this.lastProcessedBlockNumber = blockNumber; - - if (this.activatedValidatorCount >= this.config.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT) { - this.logger.info("Found enough genesis validators", {blockNumber}); - return blockNumber; - } - - this.throttledLog( - `Found ${this.state.validators.length} / ${this.config.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT} validators to genesis` - ); - } - - throw Error("depositsStream stopped without a valid genesis state"); - } - - private applyDeposits(depositEvents: phase0.DepositEvent[]): void { - const newDeposits = depositEvents - .filter((depositEvent) => !this.depositCache.has(depositEvent.index)) - .map((depositEvent) => { - this.depositCache.add(depositEvent.index); - this.depositTree.push(ssz.phase0.DepositData.hashTreeRoot(depositEvent.depositData)); - const gindex = toGindex(this.depositTree.type.depth, BigInt(depositEvent.index)); - - // Apply changes from the push above - this.depositTree.commit(); - const depositTreeNode = this.depositTree.node; - return { - proof: new Tree(depositTreeNode).getSingleProof(gindex), - data: depositEvent.depositData, - }; - }); - - const {activatedValidatorCount} = applyDeposits(this.config, this.state, newDeposits, this.depositTree); - this.activatedValidatorCount += activatedValidatorCount; - - // TODO: If necessary persist deposits here to this.db.depositData, this.db.depositDataRoot - } - - /** Throttle genesis generation status log to prevent spamming */ - private throttledLog(message: string): void { - if (Date.now() - this.lastLog > this.logEvery) { - this.lastLog = Date.now(); - this.logger.info(message); - } - } -} diff --git a/packages/beacon-node/src/chain/genesis/interface.ts b/packages/beacon-node/src/chain/genesis/interface.ts deleted file mode 100644 index 79c402cc29..0000000000 --- a/packages/beacon-node/src/chain/genesis/interface.ts +++ /dev/null @@ -1,14 +0,0 @@ -import {CompositeViewDU, VectorCompositeType} from "@chainsafe/ssz"; -import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; -import {ssz} from "@lodestar/types"; -import {Eth1Block} from "../../eth1/interface.js"; - -export type GenesisResult = { - state: CachedBeaconStateAllForks; - depositTree: CompositeViewDU>; - block: Eth1Block; -}; - -export interface IGenesisBuilder { - waitForGenesis: () => Promise; -} diff --git a/packages/beacon-node/src/chain/initState.ts b/packages/beacon-node/src/chain/initState.ts index 40d7acd0a0..7eed602a91 100644 --- a/packages/beacon-node/src/chain/initState.ts +++ b/packages/beacon-node/src/chain/initState.ts @@ -1,37 +1,11 @@ import {ChainForkConfig} from "@lodestar/config"; import {ZERO_HASH} from "@lodestar/params"; -import { - BeaconStateAllForks, - CachedBeaconStateAllForks, - computeEpochAtSlot, - computeStartSlotAtEpoch, -} from "@lodestar/state-transition"; +import {BeaconStateAllForks, computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {SignedBeaconBlock, ssz} from "@lodestar/types"; import {Logger, toHex, toRootHex} from "@lodestar/utils"; import {GENESIS_SLOT} from "../constants/index.js"; import 
{IBeaconDb} from "../db/index.js"; -import {Eth1Provider} from "../eth1/index.js"; -import {Eth1Options} from "../eth1/options.js"; import {Metrics} from "../metrics/index.js"; -import {GenesisBuilder} from "./genesis/genesis.js"; -import {GenesisResult} from "./genesis/interface.js"; - -export async function persistGenesisResult( - db: IBeaconDb, - genesisResult: GenesisResult, - genesisBlock: SignedBeaconBlock -): Promise { - await Promise.all([ - db.stateArchive.add(genesisResult.state), - db.blockArchive.add(genesisBlock), - db.depositDataRoot.putList(genesisResult.depositTree.getAllReadonlyValues()), - db.eth1Data.put(genesisResult.block.timestamp, { - ...genesisResult.block, - depositCount: genesisResult.depositTree.length, - depositRoot: genesisResult.depositTree.hashTreeRoot(), - }), - ]); -} export async function persistAnchorState( config: ChainForkConfig, @@ -75,76 +49,6 @@ export function createGenesisBlock(config: ChainForkConfig, genesisState: Beacon return genesisBlock; } -/** - * Initialize and persist a genesis state and related data - */ -export async function initStateFromEth1({ - config, - db, - logger, - opts, - signal, -}: { - config: ChainForkConfig; - db: IBeaconDb; - logger: Logger; - opts: Eth1Options; - signal: AbortSignal; -}): Promise { - logger.info("Listening to eth1 for genesis state"); - - const statePreGenesis = await db.preGenesisState.get(); - const depositTree = await db.depositDataRoot.getDepositRootTree(); - const lastProcessedBlockNumber = await db.preGenesisStateLastProcessedBlock.get(); - - const builder = new GenesisBuilder({ - config, - eth1Provider: new Eth1Provider(config, {...opts, logger}, signal), - logger, - signal, - pendingStatus: - statePreGenesis && depositTree !== undefined && lastProcessedBlockNumber != null - ? 
{state: statePreGenesis, depositTree, lastProcessedBlockNumber} - : undefined, - }); - - try { - const genesisResult = await builder.waitForGenesis(); - - // Note: .hashTreeRoot() automatically commits() - const genesisBlock = createGenesisBlock(config, genesisResult.state); - const types = config.getForkTypes(GENESIS_SLOT); - const stateRoot = genesisResult.state.hashTreeRoot(); - const blockRoot = types.BeaconBlock.hashTreeRoot(genesisBlock.message); - - logger.info("Initializing genesis state", { - stateRoot: toRootHex(stateRoot), - blockRoot: toRootHex(blockRoot), - validatorCount: genesisResult.state.validators.length, - }); - - await persistGenesisResult(db, genesisResult, genesisBlock); - - logger.verbose("Clearing pending genesis state if any"); - await db.preGenesisState.delete(); - await db.preGenesisStateLastProcessedBlock.delete(); - - return genesisResult.state; - } catch (e) { - if (builder.lastProcessedBlockNumber != null) { - logger.info("Persisting genesis state", {block: builder.lastProcessedBlockNumber}); - - // Commit changed before serializing - builder.state.commit(); - - await db.preGenesisState.put(builder.state); - await db.depositDataRoot.putList(builder.depositTree.getAllReadonlyValues()); - await db.preGenesisStateLastProcessedBlock.put(builder.lastProcessedBlockNumber); - } - throw e; - } -} - /** * Restore the latest beacon state from db */ diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index 0cec9cb001..5b51be3b9a 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ b/packages/beacon-node/src/chain/interface.ts @@ -25,7 +25,6 @@ import { phase0, } from "@lodestar/types"; import {Logger} from "@lodestar/utils"; -import {IEth1ForBlockProduction} from "../eth1/index.js"; import {IExecutionBuilder, IExecutionEngine} from "../execution/index.js"; import {Metrics} from "../metrics/metrics.js"; import {BufferPool} from "../util/bufferPool.js"; @@ -88,7 +87,6 @@ export interface IBeaconChain { readonly genesisTime: UintNum64; readonly genesisValidatorsRoot: Root; readonly earliestAvailableSlot: Slot; - readonly eth1: IEth1ForBlockProduction; readonly executionEngine: IExecutionEngine; readonly executionBuilder?: IExecutionBuilder; // Expose config for convenience in modularized functions diff --git a/packages/beacon-node/src/chain/prepareNextSlot.ts b/packages/beacon-node/src/chain/prepareNextSlot.ts index 78967f2ff2..24bbf399e8 100644 --- a/packages/beacon-node/src/chain/prepareNextSlot.ts +++ b/packages/beacon-node/src/chain/prepareNextSlot.ts @@ -1,9 +1,8 @@ import {routes} from "@lodestar/api"; import {ChainForkConfig} from "@lodestar/config"; import {getSafeExecutionBlockHash} from "@lodestar/fork-choice"; -import {ForkPostBellatrix, ForkSeq, SLOTS_PER_EPOCH, isForkPostElectra} from "@lodestar/params"; +import {ForkPostBellatrix, ForkSeq, SLOTS_PER_EPOCH} from "@lodestar/params"; import { - BeaconStateElectra, CachedBeaconStateAllForks, CachedBeaconStateExecutions, StateHashTreeRootSource, @@ -222,9 +221,6 @@ export class PrepareNextSlotScheduler { } this.metrics?.precomputeNextEpochTransition.hits.set(previousHits ?? 
0); - // Check if we can stop polling eth1 data - this.stopEth1Polling(); - this.logger.verbose("Completed PrepareNextSlotScheduler epoch transition", { nextEpoch, headSlot, @@ -252,27 +248,4 @@ export class PrepareNextSlotScheduler { state.hashTreeRoot(); hashTreeRootTimer?.(); } - - /** - * Stop eth1 data polling after eth1_deposit_index has reached deposit_requests_start_index in Electra as described in EIP-6110 - */ - stopEth1Polling(): void { - // Only continue if eth1 is still polling and finalized checkpoint is in Electra. State regen is expensive - if (this.chain.eth1.isPollingEth1Data()) { - const finalizedCheckpoint = this.chain.forkChoice.getFinalizedCheckpoint(); - const checkpointFork = this.config.getForkInfoAtEpoch(finalizedCheckpoint.epoch).name; - - if (isForkPostElectra(checkpointFork)) { - const finalizedState = this.chain.getStateByCheckpoint(finalizedCheckpoint)?.state; - - if ( - finalizedState !== undefined && - finalizedState.eth1DepositIndex === Number((finalizedState as BeaconStateElectra).depositRequestsStartIndex) - ) { - // Signal eth1 to stop polling eth1Data - this.chain.eth1.stopPollingEth1Data(); - } - } - } - } } diff --git a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts index 556e4a42f7..f3850a7972 100644 --- a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts +++ b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts @@ -45,7 +45,7 @@ import { } from "@lodestar/types"; import {Logger, sleep, toHex, toPubkeyHex, toRootHex} from "@lodestar/utils"; import {ZERO_HASH_HEX} from "../../constants/index.js"; -import {numToQuantity} from "../../eth1/provider/utils.js"; +import {numToQuantity} from "../../execution/engine/utils.js"; import { IExecutionBuilder, IExecutionEngine, @@ -78,7 +78,6 @@ export enum BlockProductionStep { voluntaryExits = "voluntaryExits", blsToExecutionChanges = "blsToExecutionChanges", attestations = "attestations", - eth1DataAndDeposits = "eth1DataAndDeposits", syncAggregate = "syncAggregate", executionPayload = "executionPayload", } @@ -667,20 +666,17 @@ export async function produceCommonBlockBody( step: BlockProductionStep.attestations, }); - const endEth1DataAndDeposits = stepsMetrics?.startTimer(); - const {eth1Data, deposits} = await this.eth1.getEth1DataAndDeposits(currentState); - endEth1DataAndDeposits?.({ - step: BlockProductionStep.eth1DataAndDeposits, - }); - const blockBody: Omit = { randaoReveal, graffiti, - eth1Data, + // Eth1 data voting is no longer required since electra + eth1Data: currentState.eth1Data, proposerSlashings, attesterSlashings, attestations, - deposits, + // Since electra, deposits are processed by the execution layer, + // we no longer support handling deposits from earlier forks. 
+ deposits: [], voluntaryExits, }; diff --git a/packages/beacon-node/src/db/beacon.ts b/packages/beacon-node/src/db/beacon.ts index 32b31e568c..944e5fc29e 100644 --- a/packages/beacon-node/src/db/beacon.ts +++ b/packages/beacon-node/src/db/beacon.ts @@ -14,16 +14,12 @@ import { CheckpointHeaderRepository, DataColumnSidecarArchiveRepository, DataColumnSidecarRepository, - DepositDataRootRepository, - DepositEventRepository, - Eth1DataRepository, ProposerSlashingRepository, StateArchiveRepository, SyncCommitteeRepository, SyncCommitteeWitnessRepository, VoluntaryExitRepository, } from "./repositories/index.js"; -import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; export type BeaconDbModules = { config: ChainForkConfig; @@ -45,14 +41,8 @@ export class BeaconDb implements IBeaconDb { voluntaryExit: VoluntaryExitRepository; proposerSlashing: ProposerSlashingRepository; attesterSlashing: AttesterSlashingRepository; - depositEvent: DepositEventRepository; blsToExecutionChange: BLSToExecutionChangeRepository; - depositDataRoot: DepositDataRootRepository; - eth1Data: Eth1DataRepository; - preGenesisState: PreGenesisState; - preGenesisStateLastProcessedBlock: PreGenesisStateLastProcessedBlock; - // lightclient bestLightClientUpdate: BestLightClientUpdateRepository; checkpointHeader: CheckpointHeaderRepository; @@ -80,11 +70,6 @@ export class BeaconDb implements IBeaconDb { this.blsToExecutionChange = new BLSToExecutionChangeRepository(config, db); this.proposerSlashing = new ProposerSlashingRepository(config, db); this.attesterSlashing = new AttesterSlashingRepository(config, db); - this.depositEvent = new DepositEventRepository(config, db); - this.depositDataRoot = new DepositDataRootRepository(config, db); - this.eth1Data = new Eth1DataRepository(config, db); - this.preGenesisState = new PreGenesisState(config, db); - this.preGenesisStateLastProcessedBlock = new PreGenesisStateLastProcessedBlock(config, db); // lightclient this.bestLightClientUpdate = new BestLightClientUpdateRepository(config, db); diff --git a/packages/beacon-node/src/db/buckets.ts b/packages/beacon-node/src/db/buckets.ts index 1e45d188d3..1d16244025 100644 --- a/packages/beacon-node/src/db/buckets.ts +++ b/packages/beacon-node/src/db/buckets.ts @@ -16,14 +16,13 @@ export enum Bucket { index_mainChain = 6, // Slot -> Root // justified, finalized state and block hashes index_chainInfo = 7, // Key -> Number64 | stateHash | blockHash - // eth1 processing - phase0_eth1Data = 8, // timestamp -> Eth1Data - index_depositDataRoot = 9, // depositIndex -> Root + // phase0_eth1Data = 8, // DEPRECATED - eth1 deposit tracking is not required since electra + // index_depositDataRoot = 9, // DEPRECATED - eth1 deposit tracking is not required since electra // op pool // phase0_attestation = 10, // DEPRECATED on v0.25.0 // phase0_aggregateAndProof = 11, // Root -> AggregateAndProof, DEPRECATED on v.27.0 - phase0_depositData = 12, // [DEPRECATED] index -> DepositData + // phase0_depositData = 12, // DEPRECATED - eth1 deposit tracking is not required since electra phase0_exit = 13, // ValidatorIndex -> VoluntaryExit phase0_proposerSlashing = 14, // ValidatorIndex -> ProposerSlashing allForks_attesterSlashing = 15, // Root -> AttesterSlashing @@ -32,15 +31,15 @@ export enum Bucket { allForks_checkpointState = 17, // Root -> BeaconState // allForks_pendingBlock = 25, // Root -> SignedBeaconBlock // DEPRECATED on v0.30.0 - phase0_depositEvent = 19, // depositIndex -> DepositEvent + // phase0_depositEvent = 19, // 
DEPRECATED - eth1 deposit tracking is not required since electra index_stateArchiveRootIndex = 26, // State Root -> slot deneb_blobSidecars = 27, // DENEB BeaconBlockRoot -> BlobSidecars deneb_blobSidecarsArchive = 28, // DENEB BeaconBlockSlot -> BlobSidecars - phase0_preGenesisState = 30, // Single = phase0.BeaconState - phase0_preGenesisStateLastProcessedBlock = 31, // Single = Uint8 + // phase0_preGenesisState = 30, // DEPRECATED - genesis from eth1 is no longer supported + // phase0_preGenesisStateLastProcessedBlock = 31, // DEPRECATED - genesis from eth1 is no longer supported // Lightclient server // altair_bestUpdatePerCommitteePeriod = 30, // DEPRECATED on v0.32.0 diff --git a/packages/beacon-node/src/db/interface.ts b/packages/beacon-node/src/db/interface.ts index a2f68192e3..1f186ae76a 100644 --- a/packages/beacon-node/src/db/interface.ts +++ b/packages/beacon-node/src/db/interface.ts @@ -12,16 +12,12 @@ import { CheckpointHeaderRepository, DataColumnSidecarArchiveRepository, DataColumnSidecarRepository, - DepositDataRootRepository, - DepositEventRepository, - Eth1DataRepository, ProposerSlashingRepository, StateArchiveRepository, SyncCommitteeRepository, SyncCommitteeWitnessRepository, VoluntaryExitRepository, } from "./repositories/index.js"; -import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; /** * The DB service manages the data layer of the beacon chain @@ -48,17 +44,8 @@ export interface IBeaconDb { voluntaryExit: VoluntaryExitRepository; proposerSlashing: ProposerSlashingRepository; attesterSlashing: AttesterSlashingRepository; - depositEvent: DepositEventRepository; blsToExecutionChange: BLSToExecutionChangeRepository; - // eth1 processing - preGenesisState: PreGenesisState; - preGenesisStateLastProcessedBlock: PreGenesisStateLastProcessedBlock; - - // all deposit data roots and merkle tree - depositDataRoot: DepositDataRootRepository; - eth1Data: Eth1DataRepository; - // lightclient bestLightClientUpdate: BestLightClientUpdateRepository; checkpointHeader: CheckpointHeaderRepository; diff --git a/packages/beacon-node/src/db/repositories/depositDataRoot.ts b/packages/beacon-node/src/db/repositories/depositDataRoot.ts deleted file mode 100644 index 50648655e4..0000000000 --- a/packages/beacon-node/src/db/repositories/depositDataRoot.ts +++ /dev/null @@ -1,80 +0,0 @@ -import {ByteVectorType, CompositeViewDU, ListCompositeType} from "@chainsafe/ssz"; -import {ChainForkConfig} from "@lodestar/config"; -import {Db, KeyValue, Repository} from "@lodestar/db"; -import {Root, ssz} from "@lodestar/types"; -import {bytesToInt} from "@lodestar/utils"; -import {Bucket, getBucketNameByValue} from "../buckets.js"; - -// TODO: Review where is best to put this type -export type DepositTree = CompositeViewDU>; - -export class DepositDataRootRepository extends Repository { - private depositRootTree?: DepositTree; - - constructor(config: ChainForkConfig, db: Db) { - const bucket = Bucket.index_depositDataRoot; - super(config, db, bucket, ssz.Root, getBucketNameByValue(bucket)); - } - - decodeKey(data: Buffer): number { - return bytesToInt(super.decodeKey(data) as unknown as Uint8Array, "be"); - } - - // depositDataRoots stored by depositData index - getId(_value: Root): number { - throw new Error("Unable to create depositIndex from root"); - } - - async put(index: number, value: Root): Promise { - await super.put(index, value); - await this.depositRootTreeSet(index, value); - } - - async batchPut(items: KeyValue[]): Promise { - await super.batchPut(items); - 
for (const {key, value} of items) { - await this.depositRootTreeSet(key, value); - } - } - - async putList(roots: Root[]): Promise { - await this.batchPut(roots.map((root, index) => ({key: index, value: root}))); - } - - async batchPutValues(values: {index: number; root: Root}[]): Promise { - await this.batchPut( - values.map(({index, root}) => ({ - key: index, - value: root, - })) - ); - } - - async getDepositRootTree(): Promise { - if (!this.depositRootTree) { - const values = await this.values(); - this.depositRootTree = ssz.phase0.DepositDataRootList.toViewDU(values); - } - return this.depositRootTree; - } - - async getDepositRootTreeAtIndex(depositIndex: number): Promise { - const depositRootTree = await this.getDepositRootTree(); - return depositRootTree.sliceTo(depositIndex); - } - - private async depositRootTreeSet(index: number, value: Uint8Array): Promise { - const depositRootTree = await this.getDepositRootTree(); - - // TODO: Review and fix properly - if (index > depositRootTree.length) { - throw Error(`Error setting depositRootTree index ${index} > length ${depositRootTree.length}`); - } - - if (index === depositRootTree.length) { - depositRootTree.push(value); - } else { - depositRootTree.set(index, value); - } - } -} diff --git a/packages/beacon-node/src/db/repositories/depositEvent.ts b/packages/beacon-node/src/db/repositories/depositEvent.ts deleted file mode 100644 index f2e180a02c..0000000000 --- a/packages/beacon-node/src/db/repositories/depositEvent.ts +++ /dev/null @@ -1,32 +0,0 @@ -import {ChainForkConfig} from "@lodestar/config"; -import {Db, Repository} from "@lodestar/db"; -import {phase0, ssz} from "@lodestar/types"; -import {Bucket, getBucketNameByValue} from "../buckets.js"; - -/** - * DepositData indexed by deposit index - * Removed when included on chain or old - */ -export class DepositEventRepository extends Repository { - constructor(config: ChainForkConfig, db: Db) { - const bucket = Bucket.phase0_depositEvent; - super(config, db, bucket, ssz.phase0.DepositEvent, getBucketNameByValue(bucket)); - } - - async deleteOld(depositCount: number): Promise { - const firstDepositIndex = await this.firstKey(); - if (firstDepositIndex === null) { - return; - } - await this.batchDelete(Array.from({length: depositCount - firstDepositIndex}, (_, i) => i + firstDepositIndex)); - } - - async batchPutValues(depositEvents: phase0.DepositEvent[]): Promise { - await this.batchPut( - depositEvents.map((depositEvent) => ({ - key: depositEvent.index, - value: depositEvent, - })) - ); - } -} diff --git a/packages/beacon-node/src/db/repositories/eth1Data.ts b/packages/beacon-node/src/db/repositories/eth1Data.ts deleted file mode 100644 index 5a21e8f7d2..0000000000 --- a/packages/beacon-node/src/db/repositories/eth1Data.ts +++ /dev/null @@ -1,33 +0,0 @@ -import {ChainForkConfig} from "@lodestar/config"; -import {Db, Repository} from "@lodestar/db"; -import {phase0, ssz} from "@lodestar/types"; -import {bytesToInt} from "@lodestar/utils"; -import {Bucket, getBucketNameByValue} from "../buckets.js"; - -export class Eth1DataRepository extends Repository { - constructor(config: ChainForkConfig, db: Db) { - const bucket = Bucket.phase0_eth1Data; - super(config, db, bucket, ssz.phase0.Eth1DataOrdered, getBucketNameByValue(bucket)); - } - - decodeKey(data: Buffer): number { - return bytesToInt(super.decodeKey(data) as unknown as Uint8Array, "be"); - } - - getId(_value: phase0.Eth1Data): number { - throw new Error("Unable to create timestamp from block hash"); - } - - async 
batchPutValues(eth1Datas: (phase0.Eth1DataOrdered & {timestamp: number})[]): Promise { - await this.batchPut( - eth1Datas.map((eth1Data) => ({ - key: eth1Data.timestamp, - value: eth1Data, - })) - ); - } - - async deleteOld(timestamp: number): Promise { - await this.batchDelete(await this.keys({lt: timestamp})); - } -} diff --git a/packages/beacon-node/src/db/repositories/index.ts b/packages/beacon-node/src/db/repositories/index.ts index 13e8f570e9..94c02223ad 100644 --- a/packages/beacon-node/src/db/repositories/index.ts +++ b/packages/beacon-node/src/db/repositories/index.ts @@ -8,9 +8,6 @@ export {BlockArchiveRepository} from "./blockArchive.js"; export {BLSToExecutionChangeRepository} from "./blsToExecutionChange.js"; export {DataColumnSidecarRepository} from "./dataColumnSidecar.js"; export {DataColumnSidecarArchiveRepository} from "./dataColumnSidecarArchive.js"; -export {DepositDataRootRepository} from "./depositDataRoot.js"; -export {DepositEventRepository} from "./depositEvent.js"; -export {Eth1DataRepository} from "./eth1Data.js"; export {BestLightClientUpdateRepository} from "./lightclientBestUpdate.js"; export {CheckpointHeaderRepository} from "./lightclientCheckpointHeader.js"; export {SyncCommitteeRepository} from "./lightclientSyncCommittee.js"; diff --git a/packages/beacon-node/src/db/single/index.ts b/packages/beacon-node/src/db/single/index.ts deleted file mode 100644 index 463dc2f4b1..0000000000 --- a/packages/beacon-node/src/db/single/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {PreGenesisState} from "./preGenesisState.js"; -export {PreGenesisStateLastProcessedBlock} from "./preGenesisStateLastProcessedBlock.js"; diff --git a/packages/beacon-node/src/db/single/preGenesisState.ts b/packages/beacon-node/src/db/single/preGenesisState.ts deleted file mode 100644 index 60ef4ee701..0000000000 --- a/packages/beacon-node/src/db/single/preGenesisState.ts +++ /dev/null @@ -1,37 +0,0 @@ -import {ChainForkConfig} from "@lodestar/config"; -import {Db, DbReqOpts} from "@lodestar/db"; -import {ForkAll, GENESIS_SLOT} from "@lodestar/params"; -import {BeaconStateAllForks} from "@lodestar/state-transition"; -import {SSZTypesFor} from "@lodestar/types"; -import {Bucket, getBucketNameByValue} from "../buckets.js"; - -export class PreGenesisState { - private readonly config: ChainForkConfig; - private readonly bucket: Bucket; - private readonly db: Db; - private readonly key: Uint8Array; - private readonly type: SSZTypesFor; - private readonly dbReqOpts: DbReqOpts; - - constructor(config: ChainForkConfig, db: Db) { - this.config = config; - this.db = db; - this.bucket = Bucket.phase0_preGenesisState; - this.key = new Uint8Array([this.bucket]); - this.type = this.config.getForkTypes(GENESIS_SLOT).BeaconState; - this.dbReqOpts = {bucketId: getBucketNameByValue(this.bucket)}; - } - - async put(value: BeaconStateAllForks): Promise { - await this.db.put(this.key, value.serialize(), this.dbReqOpts); - } - - async get(): Promise { - const value = await this.db.get(this.key, this.dbReqOpts); - return value ? 
this.type.deserializeToViewDU(value) : null; - } - - async delete(): Promise { - await this.db.delete(this.key, this.dbReqOpts); - } -} diff --git a/packages/beacon-node/src/db/single/preGenesisStateLastProcessedBlock.ts b/packages/beacon-node/src/db/single/preGenesisStateLastProcessedBlock.ts deleted file mode 100644 index 5d27984adf..0000000000 --- a/packages/beacon-node/src/db/single/preGenesisStateLastProcessedBlock.ts +++ /dev/null @@ -1,34 +0,0 @@ -import {UintNumberType} from "@chainsafe/ssz"; -import {ChainForkConfig} from "@lodestar/config"; -import {Db, DbReqOpts} from "@lodestar/db"; -import {ssz} from "@lodestar/types"; -import {Bucket, getBucketNameByValue} from "../buckets.js"; - -export class PreGenesisStateLastProcessedBlock { - private readonly bucket: Bucket; - private readonly type: UintNumberType; - private readonly db: Db; - private readonly key: Uint8Array; - private readonly dbReqOpts: DbReqOpts; - - constructor(_config: ChainForkConfig, db: Db) { - this.db = db; - this.type = ssz.UintNum64; - this.bucket = Bucket.phase0_preGenesisStateLastProcessedBlock; - this.key = new Uint8Array([this.bucket]); - this.dbReqOpts = {bucketId: getBucketNameByValue(this.bucket)}; - } - - async put(value: number): Promise { - await this.db.put(this.key, this.type.serialize(value), this.dbReqOpts); - } - - async get(): Promise { - const value = await this.db.get(this.key, this.dbReqOpts); - return value ? this.type.deserialize(value) : null; - } - - async delete(): Promise { - await this.db.delete(this.key, this.dbReqOpts); - } -} diff --git a/packages/beacon-node/src/eth1/errors.ts b/packages/beacon-node/src/eth1/errors.ts deleted file mode 100644 index 914a5448ad..0000000000 --- a/packages/beacon-node/src/eth1/errors.ts +++ /dev/null @@ -1,40 +0,0 @@ -import {LodestarError} from "@lodestar/utils"; - -export enum Eth1ErrorCode { - /** Deposit index too high */ - DEPOSIT_INDEX_TOO_HIGH = "ETH1_ERROR_DEPOSIT_INDEX_TOO_HIGH", - /** Not enough deposits in DB */ - NOT_ENOUGH_DEPOSITS = "ETH1_ERROR_NOT_ENOUGH_DEPOSITS", - /** Too many deposits returned by DB */ - TOO_MANY_DEPOSITS = "ETH1_ERROR_TOO_MANY_DEPOSITS", - /** Deposit root tree does not match current eth1Data */ - WRONG_DEPOSIT_ROOT = "ETH1_ERROR_WRONG_DEPOSIT_ROOT", - - /** No deposits found for block range */ - NO_DEPOSITS_FOR_BLOCK_RANGE = "ETH1_ERROR_NO_DEPOSITS_FOR_BLOCK_RANGE", - /** No depositRoot for depositCount */ - NO_DEPOSIT_ROOT = "ETH1_ERROR_NO_DEPOSIT_ROOT", - /** Not enough deposit roots for index */ - NOT_ENOUGH_DEPOSIT_ROOTS = "ETH1_ERROR_NOT_ENOUGH_DEPOSIT_ROOTS", - - /** Attempted to insert a duplicate log for same index into the Eth1DepositsCache */ - DUPLICATE_DISTINCT_LOG = "ETH1_ERROR_DUPLICATE_DISTINCT_LOG", - /** Attempted to insert a log with index != prev + 1 into the Eth1DepositsCache */ - NON_CONSECUTIVE_LOGS = "ETH1_ERROR_NON_CONSECUTIVE_LOGS", - /** Expected a deposit log in the db for the index, missing log implies a corrupted db */ - MISSING_DEPOSIT_LOG = "ETH1_ERROR_MISSING_DEPOSIT_LOG", -} - -export type Eth1ErrorType = - | {code: Eth1ErrorCode.DEPOSIT_INDEX_TOO_HIGH; depositIndex: number; depositCount: number} - | {code: Eth1ErrorCode.NOT_ENOUGH_DEPOSITS; len: number; expectedLen: number} - | {code: Eth1ErrorCode.TOO_MANY_DEPOSITS; len: number; expectedLen: number} - | {code: Eth1ErrorCode.WRONG_DEPOSIT_ROOT; root: string; expectedRoot: string} - | {code: Eth1ErrorCode.NO_DEPOSITS_FOR_BLOCK_RANGE; fromBlock: number; toBlock: number} - | {code: Eth1ErrorCode.NO_DEPOSIT_ROOT; depositCount: 
number} - | {code: Eth1ErrorCode.NOT_ENOUGH_DEPOSIT_ROOTS; index: number; treeLength: number} - | {code: Eth1ErrorCode.DUPLICATE_DISTINCT_LOG; newIndex: number; lastLogIndex: number} - | {code: Eth1ErrorCode.NON_CONSECUTIVE_LOGS; newIndex: number; lastLogIndex: number} - | {code: Eth1ErrorCode.MISSING_DEPOSIT_LOG; newIndex: number; lastLogIndex: number}; - -export class Eth1Error extends LodestarError {} diff --git a/packages/beacon-node/src/eth1/eth1DataCache.ts b/packages/beacon-node/src/eth1/eth1DataCache.ts deleted file mode 100644 index 7f9bb99a2f..0000000000 --- a/packages/beacon-node/src/eth1/eth1DataCache.ts +++ /dev/null @@ -1,26 +0,0 @@ -import {ChainForkConfig} from "@lodestar/config"; -import {phase0} from "@lodestar/types"; -import {IBeaconDb} from "../db/index.js"; - -export class Eth1DataCache { - db: IBeaconDb; - config: ChainForkConfig; - - constructor(config: ChainForkConfig, db: IBeaconDb) { - this.config = config; - this.db = db; - } - - async get({timestampRange}: {timestampRange: {gte: number; lte: number}}): Promise { - return this.db.eth1Data.values(timestampRange); - } - - async add(eth1Datas: (phase0.Eth1DataOrdered & {timestamp: number})[]): Promise { - await this.db.eth1Data.batchPutValues(eth1Datas); - } - - async getHighestCachedBlockNumber(): Promise { - const highestEth1Data = await this.db.eth1Data.lastValue(); - return highestEth1Data?.blockNumber ?? null; - } -} diff --git a/packages/beacon-node/src/eth1/eth1DepositDataTracker.ts b/packages/beacon-node/src/eth1/eth1DepositDataTracker.ts deleted file mode 100644 index 43746face8..0000000000 --- a/packages/beacon-node/src/eth1/eth1DepositDataTracker.ts +++ /dev/null @@ -1,410 +0,0 @@ -import {ChainForkConfig} from "@lodestar/config"; -import { - BeaconStateAllForks, - CachedBeaconStateAllForks, - CachedBeaconStateElectra, - becomesNewEth1Data, -} from "@lodestar/state-transition"; -import {phase0, ssz} from "@lodestar/types"; -import {ErrorAborted, Logger, TimeoutError, fromHex, isErrorAborted, sleep} from "@lodestar/utils"; -import {IBeaconDb} from "../db/index.js"; -import {Metrics} from "../metrics/index.js"; -import {Eth1DataCache} from "./eth1DataCache.js"; -import {Eth1DepositsCache} from "./eth1DepositsCache.js"; -import {Eth1DataAndDeposits, EthJsonRpcBlockRaw, IEth1Provider} from "./interface.js"; -import {Eth1Options} from "./options.js"; -import {parseEth1Block} from "./provider/eth1Provider.js"; -import {HttpRpcError} from "./provider/jsonRpcHttpClient.js"; -import {isJsonRpcTruncatedError} from "./provider/utils.js"; -import {getDeposits} from "./utils/deposits.js"; -import {getEth1VotesToConsider, pickEth1Vote} from "./utils/eth1Vote.js"; - -const MAX_BLOCKS_PER_BLOCK_QUERY = 1000; -const MIN_BLOCKS_PER_BLOCK_QUERY = 10; - -const MAX_BLOCKS_PER_LOG_QUERY = 1000; -const MIN_BLOCKS_PER_LOG_QUERY = 10; - -/** Eth1 blocks happen every 14s approx, not need to update too often once synced */ -const AUTO_UPDATE_PERIOD_MS = 60 * 1000; -/** Prevent infinite loops */ -const MIN_UPDATE_PERIOD_MS = 1 * 1000; -/** Milliseconds to wait after getting 429 Too Many Requests */ -const RATE_LIMITED_WAIT_MS = 30 * 1000; -/** Min time to wait on auto update loop on unknown error */ -const MIN_WAIT_ON_ERROR_MS = 1 * 1000; - -/** Number of blocks to download if the node detects it is lagging behind due to an inaccurate - relationship between block-number-based follow distance and time-based follow distance. 
*/ -const ETH1_FOLLOW_DISTANCE_DELTA_IF_SLOW = 32; - -/** The absolute minimum follow distance to enforce when downloading catchup batches, from LH */ -const ETH_MIN_FOLLOW_DISTANCE = 64; - -export type Eth1DepositDataTrackerModules = { - config: ChainForkConfig; - db: IBeaconDb; - metrics: Metrics | null; - logger: Logger; - signal: AbortSignal; -}; - -/** - * Main class handling eth1 data fetching, processing and storing - * Upon instantiation, starts fetching deposits and blocks at regular intervals - */ -export class Eth1DepositDataTracker { - private config: ChainForkConfig; - private logger: Logger; - private signal: AbortSignal; - private readonly metrics: Metrics | null; - - // Internal modules, state - private depositsCache: Eth1DepositsCache; - private eth1DataCache: Eth1DataCache; - private lastProcessedDepositBlockNumber: number | null = null; - - /** Dynamically adjusted follow distance */ - private eth1FollowDistance: number; - /** Dynamically adjusted batch size to fetch deposit logs */ - private eth1GetBlocksBatchSizeDynamic = MAX_BLOCKS_PER_BLOCK_QUERY; - /** Dynamically adjusted batch size to fetch deposit logs */ - private eth1GetLogsBatchSizeDynamic = MAX_BLOCKS_PER_LOG_QUERY; - private readonly forcedEth1DataVote: phase0.Eth1Data | null; - /** To stop `runAutoUpdate()` in addition to AbortSignal */ - private stopPolling: boolean; - - constructor( - opts: Eth1Options, - {config, db, metrics, logger, signal}: Eth1DepositDataTrackerModules, - private readonly eth1Provider: IEth1Provider - ) { - this.config = config; - this.metrics = metrics; - this.logger = logger; - this.signal = signal; - this.eth1Provider = eth1Provider; - this.depositsCache = new Eth1DepositsCache(opts, config, db); - this.eth1DataCache = new Eth1DataCache(config, db); - this.eth1FollowDistance = config.ETH1_FOLLOW_DISTANCE; - this.stopPolling = false; - - this.forcedEth1DataVote = opts.forcedEth1DataVote - ? ssz.phase0.Eth1Data.deserialize(fromHex(opts.forcedEth1DataVote)) - : null; - - if (opts.depositContractDeployBlock === undefined) { - this.logger.warn("No depositContractDeployBlock provided"); - } - - if (metrics) { - // Set constant value once - metrics?.eth1.eth1FollowDistanceSecondsConfig.set(config.SECONDS_PER_ETH1_BLOCK * config.ETH1_FOLLOW_DISTANCE); - metrics.eth1.eth1FollowDistanceDynamic.addCollect(() => { - metrics.eth1.eth1FollowDistanceDynamic.set(this.eth1FollowDistance); - metrics.eth1.eth1GetBlocksBatchSizeDynamic.set(this.eth1GetBlocksBatchSizeDynamic); - metrics.eth1.eth1GetLogsBatchSizeDynamic.set(this.eth1GetLogsBatchSizeDynamic); - }); - } - - if (opts.enabled) { - this.runAutoUpdate().catch((e: Error) => { - if (!(e instanceof ErrorAborted)) { - this.logger.error("Error on eth1 loop", {}, e); - } - }); - } - } - - isPollingEth1Data(): boolean { - return !this.stopPolling; - } - - stopPollingEth1Data(): void { - this.stopPolling = true; - } - - /** - * Return eth1Data and deposits ready for block production for a given state - */ - async getEth1DataAndDeposits(state: CachedBeaconStateAllForks): Promise { - if ( - state.epochCtx.isPostElectra() && - state.eth1DepositIndex >= (state as CachedBeaconStateElectra).depositRequestsStartIndex - ) { - // No need to poll eth1Data since Electra deprecates the mechanism after depositRequestsStartIndex is reached - return {eth1Data: state.eth1Data, deposits: []}; - } - const eth1Data = this.forcedEth1DataVote ?? 
(await this.getEth1Data(state)); - const deposits = await this.getDeposits(state, eth1Data); - return {eth1Data, deposits}; - } - - /** - * Returns an eth1Data vote for a given state. - * Requires internal caches to be updated regularly to return good results - */ - private async getEth1Data(state: BeaconStateAllForks): Promise { - try { - const eth1VotesToConsider = await getEth1VotesToConsider( - this.config, - state, - this.eth1DataCache.get.bind(this.eth1DataCache) - ); - return pickEth1Vote(state, eth1VotesToConsider); - } catch (e) { - // Note: In case there's a DB issue, don't stop a block proposal. Just vote for current eth1Data - this.logger.error("CRITICAL: Error reading valid votes, voting for current eth1Data", {}, e as Error); - return state.eth1Data; - } - } - - /** - * Returns deposits to be included for a given state and eth1Data vote. - * Requires internal caches to be updated regularly to return good results - */ - private async getDeposits( - state: CachedBeaconStateAllForks, - eth1DataVote: phase0.Eth1Data - ): Promise { - // No new deposits have to be included, continue - if (eth1DataVote.depositCount === state.eth1DepositIndex) { - return []; - } - - // TODO: Review if this is optimal - // Convert to view first to hash once and compare hashes - const eth1DataVoteView = ssz.phase0.Eth1Data.toViewDU(eth1DataVote); - - // Eth1 data may change due to the vote included in this block - const newEth1Data = becomesNewEth1Data(state, eth1DataVoteView) ? eth1DataVoteView : state.eth1Data; - return getDeposits(state, newEth1Data, this.depositsCache.get.bind(this.depositsCache)); - } - - /** - * Abortable async setInterval that runs its callback once at max between `ms` at minimum - */ - private async runAutoUpdate(): Promise { - let lastRunMs = 0; - - while (!this.signal.aborted && !this.stopPolling) { - lastRunMs = Date.now(); - - try { - const hasCaughtUp = await this.update(); - - this.metrics?.eth1.depositTrackerIsCaughtup.set(hasCaughtUp ? 1 : 0); - - if (hasCaughtUp) { - const sleepTimeMs = Math.max(AUTO_UPDATE_PERIOD_MS + lastRunMs - Date.now(), MIN_UPDATE_PERIOD_MS); - await sleep(sleepTimeMs, this.signal); - } - } catch (e) { - this.metrics?.eth1.depositTrackerUpdateErrors.inc(1); - - // From Infura: 429 Too Many Requests - if (e instanceof HttpRpcError && e.status === 429) { - this.logger.debug("Eth1 provider rate limited", {}, e); - await sleep(RATE_LIMITED_WAIT_MS, this.signal); - // only log error if state switched from online to some other state - } else if (!isErrorAborted(e)) { - await sleep(MIN_WAIT_ON_ERROR_MS, this.signal); - } - } - } - } - - /** - * Update the deposit and block cache, returning an error if either fail - * @returns true if it has catched up to the remote follow block - */ - private async update(): Promise { - const remoteHighestBlock = await this.eth1Provider.getBlockNumber(); - this.metrics?.eth1.remoteHighestBlock.set(remoteHighestBlock); - - const remoteFollowBlock = remoteHighestBlock - this.eth1FollowDistance; - - // If remoteFollowBlock is not at or beyond deployBlock, there is no need to - // fetch and track any deposit data yet - if (remoteFollowBlock < (this.eth1Provider.deployBlock ?? 
0)) return true; - - const hasCaughtUpDeposits = await this.updateDepositCache(remoteFollowBlock); - const hasCaughtUpBlocks = await this.updateBlockCache(remoteFollowBlock); - return hasCaughtUpDeposits && hasCaughtUpBlocks; - } - - /** - * Fetch deposit events from remote eth1 node up to follow-distance block - * @returns true if it has catched up to the remote follow block - */ - private async updateDepositCache(remoteFollowBlock: number): Promise { - const lastProcessedDepositBlockNumber = await this.getLastProcessedDepositBlockNumber(); - // The DB may contain deposits from a different chain making lastProcessedDepositBlockNumber > current chain tip - // The Math.min() fixes those rare scenarios where fromBlock > toBlock - const fromBlock = Math.min(remoteFollowBlock, this.getFromBlockToFetch(lastProcessedDepositBlockNumber)); - const toBlock = Math.min(remoteFollowBlock, fromBlock + this.eth1GetLogsBatchSizeDynamic - 1); - - let depositEvents: phase0.DepositEvent[]; - try { - depositEvents = await this.eth1Provider.getDepositEvents(fromBlock, toBlock); - // Increase the batch size linearly even if we scale down exponentially (half each time) - this.eth1GetLogsBatchSizeDynamic = Math.min( - MAX_BLOCKS_PER_LOG_QUERY, - this.eth1GetLogsBatchSizeDynamic + MIN_BLOCKS_PER_LOG_QUERY - ); - } catch (e) { - if (isJsonRpcTruncatedError(e as Error) || e instanceof TimeoutError) { - this.eth1GetLogsBatchSizeDynamic = Math.max( - MIN_BLOCKS_PER_LOG_QUERY, - Math.floor(this.eth1GetLogsBatchSizeDynamic / 2) - ); - } - throw e; - } - - this.logger.verbose("Fetched deposits", {depositCount: depositEvents.length, fromBlock, toBlock}); - this.metrics?.eth1.depositEventsFetched.inc(depositEvents.length); - - await this.depositsCache.add(depositEvents); - // Store the `toBlock` since that block may not contain - this.lastProcessedDepositBlockNumber = toBlock; - this.metrics?.eth1.lastProcessedDepositBlockNumber.set(toBlock); - - return toBlock >= remoteFollowBlock; - } - - /** - * Fetch block headers from a remote eth1 node up to follow-distance block - * - * depositRoot and depositCount are inferred from already fetched deposits. - * Calling get_deposit_root() and the smart contract for a non-latest block requires an - * archive node, something most users don't have access too. - * @returns true if it has catched up to the remote follow timestamp - */ - private async updateBlockCache(remoteFollowBlock: number): Promise { - const lastCachedBlock = await this.eth1DataCache.getHighestCachedBlockNumber(); - // lastProcessedDepositBlockNumber sets the upper bound of the possible block range to fetch in this update - const lastProcessedDepositBlockNumber = await this.getLastProcessedDepositBlockNumber(); - // lowestEventBlockNumber set a lower bound of possible block range to fetch in this update - const lowestEventBlockNumber = await this.depositsCache.getLowestDepositEventBlockNumber(); - - // We are all caught up if: - // 1. If lowestEventBlockNumber is null = no deposits have been fetch or found yet. - // So there's not useful blocks to fetch until at least 1 deposit is found. - // 2. If the remoteFollowBlock is behind the lowestEventBlockNumber. This can happen - // if the EL's data was wiped and restarted. 
Not exiting here would other wise - // cause a NO_DEPOSITS_FOR_BLOCK_RANGE error - if ( - lowestEventBlockNumber === null || - lastProcessedDepositBlockNumber === null || - remoteFollowBlock < lowestEventBlockNumber - ) { - return true; - } - - // Cap the upper limit of fromBlock with remoteFollowBlock in case deployBlock is set to a different network value - const fromBlock = Math.min( - remoteFollowBlock, - // Fetch from the last cached block or the lowest known deposit block number - Math.max(this.getFromBlockToFetch(lastCachedBlock), lowestEventBlockNumber) - ); - const toBlock = Math.min( - remoteFollowBlock, - fromBlock + this.eth1GetBlocksBatchSizeDynamic - 1, // Block range is inclusive - lastProcessedDepositBlockNumber - ); - - let blocksRaw: EthJsonRpcBlockRaw[]; - try { - blocksRaw = await this.eth1Provider.getBlocksByNumber(fromBlock, toBlock); - // Increase the batch size linearly even if we scale down exponentially (half each time) - this.eth1GetBlocksBatchSizeDynamic = Math.min( - MAX_BLOCKS_PER_BLOCK_QUERY, - this.eth1GetBlocksBatchSizeDynamic + MIN_BLOCKS_PER_BLOCK_QUERY - ); - } catch (e) { - if (isJsonRpcTruncatedError(e as Error) || e instanceof TimeoutError) { - this.eth1GetBlocksBatchSizeDynamic = Math.max( - MIN_BLOCKS_PER_BLOCK_QUERY, - Math.floor(this.eth1GetBlocksBatchSizeDynamic / 2) - ); - } - throw e; - } - const blocks = blocksRaw.map(parseEth1Block); - - this.logger.verbose("Fetched eth1 blocks", {blockCount: blocks.length, fromBlock, toBlock}); - this.metrics?.eth1.blocksFetched.inc(blocks.length); - this.metrics?.eth1.lastFetchedBlockBlockNumber.set(toBlock); - const lastBlock = blocks.at(-1); - if (lastBlock) { - this.metrics?.eth1.lastFetchedBlockTimestamp.set(lastBlock.timestamp); - } - - const eth1Datas = await this.depositsCache.getEth1DataForBlocks(blocks, lastProcessedDepositBlockNumber); - await this.eth1DataCache.add(eth1Datas); - - // Note: ETH1_FOLLOW_DISTANCE_SECONDS = ETH1_FOLLOW_DISTANCE * SECONDS_PER_ETH1_BLOCK - // Deposit tracker must fetch blocks and deposits up to ETH1_FOLLOW_DISTANCE_SECONDS, - // measured in time not blocks. To vote on valid votes it must populate up to the time based follow distance. - // If it assumes SECONDS_PER_ETH1_BLOCK but block times are: - // - slower: Cache will not contain all blocks - // - faster: Cache will contain all required blocks + some ahead of timed follow distance - // - // For mainnet we must fetch blocks up until block.timestamp < now - 28672 sec. Based on follow distance: - // Block times | actual follow distance - // 14 | 2048 - // 20 | 1434 - // 30 | 956 - // 60 | 478 - // - // So if after fetching the block at ETH1_FOLLOW_DISTANCE, but it's timestamp is not greater than - // ETH1_FOLLOW_DISTANCE_SECONDS, reduce the ETH1_FOLLOW_DISTANCE by a small delta and fetch more blocks. - // Otherwise if the last fetched block if above ETH1_FOLLOW_DISTANCE_SECONDS, reduce ETH1_FOLLOW_DISTANCE. - - if (toBlock < remoteFollowBlock) { - return false; - } - - if (!lastBlock) { - return true; - } - - const remoteFollowBlockTimestamp = - Math.round(Date.now() / 1000) - this.config.SECONDS_PER_ETH1_BLOCK * this.config.ETH1_FOLLOW_DISTANCE; - const blockAfterTargetTimestamp = blocks.find((block) => block.timestamp >= remoteFollowBlockTimestamp); - - if (blockAfterTargetTimestamp) { - // Catched up to target timestamp, increase eth1FollowDistance. Limit max config.ETH1_FOLLOW_DISTANCE. - // If the block that's right above the timestamp has been fetched now, use it to compute the precise delta. 
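The table in the comment above is just the fixed time window divided by the observed block time: with mainnet values ETH1_FOLLOW_DISTANCE = 2048 and SECONDS_PER_ETH1_BLOCK = 14, the window is 2048 * 14 = 28672 seconds. A quick standalone check of those numbers (illustrative only):

```ts
// The "actual follow distance" column follows from one identity:
//   ETH1_FOLLOW_DISTANCE_SECONDS = ETH1_FOLLOW_DISTANCE * SECONDS_PER_ETH1_BLOCK = 2048 * 14 = 28672 s
// With a different observed block time, the same time window covers a different number of blocks.
const ETH1_FOLLOW_DISTANCE = 2048;
const SECONDS_PER_ETH1_BLOCK = 14;
const followDistanceSeconds = ETH1_FOLLOW_DISTANCE * SECONDS_PER_ETH1_BLOCK; // 28672

for (const observedBlockTimeSec of [14, 20, 30, 60]) {
  const actualFollowDistance = Math.round(followDistanceSeconds / observedBlockTimeSec);
  console.log(`${observedBlockTimeSec}s blocks -> ~${actualFollowDistance} blocks`); // 2048, 1434, 956, 478
}
```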
- const delta = Math.max(lastBlock.blockNumber - blockAfterTargetTimestamp.blockNumber, 1); - this.eth1FollowDistance = Math.min(this.eth1FollowDistance + delta, this.config.ETH1_FOLLOW_DISTANCE); - - return true; - } - // Blocks are slower than expected, reduce eth1FollowDistance. Limit min CATCHUP_MIN_FOLLOW_DISTANCE - const delta = - this.eth1FollowDistance - - Math.max(this.eth1FollowDistance - ETH1_FOLLOW_DISTANCE_DELTA_IF_SLOW, ETH_MIN_FOLLOW_DISTANCE); - this.eth1FollowDistance = this.eth1FollowDistance - delta; - - // Even if the blocks are slow, when we are all caught up as there is no - // further possibility to reduce follow distance, we need to call it quits - // for now, else it leads to an incessant poll on the EL - return delta === 0; - } - - private getFromBlockToFetch(lastCachedBlock: number | null): number { - if (lastCachedBlock === null) { - return this.eth1Provider.deployBlock ?? 0; - } - return lastCachedBlock + 1; - } - - private async getLastProcessedDepositBlockNumber(): Promise { - if (this.lastProcessedDepositBlockNumber === null) { - this.lastProcessedDepositBlockNumber = await this.depositsCache.getHighestDepositEventBlockNumber(); - } - return this.lastProcessedDepositBlockNumber; - } -} diff --git a/packages/beacon-node/src/eth1/eth1DepositsCache.ts b/packages/beacon-node/src/eth1/eth1DepositsCache.ts deleted file mode 100644 index c84cd63752..0000000000 --- a/packages/beacon-node/src/eth1/eth1DepositsCache.ts +++ /dev/null @@ -1,141 +0,0 @@ -import {byteArrayEquals} from "@chainsafe/ssz"; -import {ChainForkConfig} from "@lodestar/config"; -import {FilterOptions} from "@lodestar/db"; -import {phase0, ssz} from "@lodestar/types"; -import {IBeaconDb} from "../db/index.js"; -import {Eth1Error, Eth1ErrorCode} from "./errors.js"; -import {Eth1Block} from "./interface.js"; -import {getDepositsWithProofs} from "./utils/deposits.js"; -import {getEth1DataForBlocks} from "./utils/eth1Data.js"; -import {assertConsecutiveDeposits} from "./utils/eth1DepositEvent.js"; - -export class Eth1DepositsCache { - unsafeAllowDepositDataOverwrite: boolean; - db: IBeaconDb; - config: ChainForkConfig; - - constructor(opts: {unsafeAllowDepositDataOverwrite?: boolean}, config: ChainForkConfig, db: IBeaconDb) { - this.config = config; - this.db = db; - this.unsafeAllowDepositDataOverwrite = opts.unsafeAllowDepositDataOverwrite ?? false; - } - - /** - * Returns a list of `Deposit` objects, within the given deposit index `range`. - * - * The `depositCount` is used to generate the proofs for the `Deposits`. For example, if we - * have 100 proofs, but the Ethereum Consensus chain only acknowledges 50 of them, we must produce our - * proofs with respect to a tree size of 50. - */ - async get(indexRange: FilterOptions, eth1Data: phase0.Eth1Data): Promise { - const depositEvents = await this.db.depositEvent.values(indexRange); - const depositRootTree = await this.db.depositDataRoot.getDepositRootTree(); - return getDepositsWithProofs(depositEvents, depositRootTree, eth1Data); - } - - /** - * Add log to cache - * This function enforces that `logs` are imported one-by-one with consecutive indexes - */ - async add(depositEvents: phase0.DepositEvent[]): Promise { - assertConsecutiveDeposits(depositEvents); - - const lastLog = await this.db.depositEvent.lastValue(); - const firstEvent = depositEvents[0]; - - // Check, validate and skip if we got any deposit events already present in DB - // This can happen if the remote eth1/EL resets its head in these four scenarios: - // 1. 
Remote eth1/EL resynced/restarted from head behind its previous head pre-merge - // 2. In a post merge scenario, Lodestar restarted from finalized state from DB which - // generally is a few epochs behind the last synced head. This causes eth1 tracker to reset - // and refetch the deposits as the lodestar syncs further along (Post merge there is 1-1 - // correspondence between EL and CL blocks) - // 3. The EL reorged beyond the eth1 follow distance. - // - // While 1. & 2. are benign and we handle them below by checking if the duplicate log fetched - // is same as one written in DB. Refer to this issue for some data dump of how this happens - // https://github.com/ChainSafe/lodestar/issues/3674 - // - // If the duplicate log fetched is not same as written in DB then its probablu scenario 3. - // which would be a catastrophic event for the network (or we messed up real bad!!!). - // - // So we provide for a way to overwrite this log without deleting full db via - // --unsafeAllowDepositDataOverwrite cli flag which will just overwrite the previous tracker data - // if any. This option as indicated by its name is unsafe and to be only used if you know what - // you are doing. - if (lastLog !== null && firstEvent !== undefined) { - const newIndex = firstEvent.index; - const lastLogIndex = lastLog.index; - - if (!this.unsafeAllowDepositDataOverwrite && firstEvent.index <= lastLog.index) { - // lastLogIndex - newIndex + 1 events are duplicate since this is a consecutive log - // as asserted by assertConsecutiveDeposits. Splice those events out from depositEvents. - const skipEvents = depositEvents.splice(0, lastLogIndex - newIndex + 1); - // After splicing skipEvents will contain duplicate events to be checked and validated - // and rest of the remaining events in depositEvents could be safely written to DB and - // move the tracker along. - for (const depositEvent of skipEvents) { - const prevDBSerializedEvent = await this.db.depositEvent.getBinary(depositEvent.index); - if (!prevDBSerializedEvent) { - throw new Eth1Error({code: Eth1ErrorCode.MISSING_DEPOSIT_LOG, newIndex, lastLogIndex}); - } - const serializedEvent = ssz.phase0.DepositEvent.serialize(depositEvent); - if (!byteArrayEquals(prevDBSerializedEvent, serializedEvent)) { - throw new Eth1Error({code: Eth1ErrorCode.DUPLICATE_DISTINCT_LOG, newIndex, lastLogIndex}); - } - } - } else if (newIndex > lastLogIndex + 1) { - // deposit events need to be consective, the way we fetch our tracker. If the deposit event - // is not consecutive it means either our tracker, or the corresponding eth1/EL - // node or the database has messed up. All these failures are critical and the tracker - // shouldn't proceed without the resolution of this error. 
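// Hypothetical standalone sketch of the duplicate/gap classification applied in this branch,
// assuming the integer deposit indexes stored by the tracker:
function classifyNextEvent(lastLogIndex: number, newIndex: number): "duplicate" | "consecutive" | "gap" {
  if (newIndex <= lastLogIndex) return "duplicate"; // re-fetched events, verified byte-for-byte against the DB
  if (newIndex === lastLogIndex + 1) return "consecutive"; // safe to append and advance the tracker
  return "gap"; // non-consecutive logs: tracker, EL or DB is inconsistent, abort
}
// classifyNextEvent(10, 8) -> "duplicate"; classifyNextEvent(10, 11) -> "consecutive"; classifyNextEvent(10, 13) -> "gap"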
- throw new Eth1Error({code: Eth1ErrorCode.NON_CONSECUTIVE_LOGS, newIndex, lastLogIndex}); - } - } - - const depositRoots = depositEvents.map((depositEvent) => ({ - index: depositEvent.index, - root: ssz.phase0.DepositData.hashTreeRoot(depositEvent.depositData), - })); - - // Store events after verifying that data is consecutive - // depositDataRoot will throw if adding non consecutive roots - await this.db.depositDataRoot.batchPutValues(depositRoots); - await this.db.depositEvent.batchPutValues(depositEvents); - } - - /** - * Appends partial eth1 data (depositRoot, depositCount) in a block range (inclusive) - * Returned array is sequential and ascending in blockNumber - * @param fromBlock - * @param toBlock - */ - async getEth1DataForBlocks( - blocks: Eth1Block[], - lastProcessedDepositBlockNumber: number | null - ): Promise<(phase0.Eth1Data & Eth1Block)[]> { - const highestBlock = blocks.at(-1)?.blockNumber; - return getEth1DataForBlocks( - blocks, - this.db.depositEvent.valuesStream({lte: highestBlock, reverse: true}), - await this.db.depositDataRoot.getDepositRootTree(), - lastProcessedDepositBlockNumber - ); - } - - /** - * Returns the highest blockNumber stored in DB if any - */ - async getHighestDepositEventBlockNumber(): Promise { - const latestEvent = await this.db.depositEvent.lastValue(); - return latestEvent?.blockNumber || null; - } - - /** - * Returns the lowest blockNumber stored in DB if any - */ - async getLowestDepositEventBlockNumber(): Promise { - const firstEvent = await this.db.depositEvent.firstValue(); - return firstEvent?.blockNumber || null; - } -} diff --git a/packages/beacon-node/src/eth1/index.ts b/packages/beacon-node/src/eth1/index.ts deleted file mode 100644 index 02dffb3d4a..0000000000 --- a/packages/beacon-node/src/eth1/index.ts +++ /dev/null @@ -1,94 +0,0 @@ -import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; -import {Eth1DepositDataTracker, Eth1DepositDataTrackerModules} from "./eth1DepositDataTracker.js"; -import {Eth1DataAndDeposits, IEth1ForBlockProduction, IEth1Provider} from "./interface.js"; -import {Eth1Options} from "./options.js"; -import {Eth1Provider} from "./provider/eth1Provider.js"; -export {Eth1Provider}; -export type {IEth1ForBlockProduction, IEth1Provider}; - -// This module encapsulates all consumer functionality to the execution node (formerly eth1). The execution client -// has to: -// -// - For genesis, the beacon node must follow the eth1 chain: get all deposit events + blocks within that range. -// Once the genesis conditions are met, start the POS chain with the resulting state. The logic is similar to the -// two points below, but the implementation is specialized for each scenario. -// -// - Follow the eth1 block chain to validate eth1Data votes. It needs all consecutive blocks within a specific range -// and at a distance from the head. -// ETH1_FOLLOW_DISTANCE uint64(2**11) (= 2,048) Eth1 blocks ~8 hours -// EPOCHS_PER_ETH1_VOTING_PERIOD uint64(2**6) (= 64) epochs ~6.8 hours -// -// - Fetch ALL deposit events from the deposit contract to build the deposit tree and validate future merkle proofs. -// Then it must follow deposit events at a distance roughly similar to the `ETH1_FOLLOW_DISTANCE` parameter above. 
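// Illustrative sketch (hypothetical, standalone): where the "~8 hours" and "~6.8 hours" figures in the
// comment above come from, assuming the mainnet presets SECONDS_PER_ETH1_BLOCK = 14, SECONDS_PER_SLOT = 12
// and SLOTS_PER_EPOCH = 32.
const followDistanceHours = (2048 * 14) / 3600; // ETH1_FOLLOW_DISTANCE blocks of 14 sec ≈ 7.96 hours
const votingPeriodHours = (64 * 32 * 12) / 3600; // EPOCHS_PER_ETH1_VOTING_PERIOD of 64 epochs ≈ 6.83 hours
console.log(followDistanceHours.toFixed(2), votingPeriodHours.toFixed(2)); // "7.96" "6.83"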
- -export function initializeEth1ForBlockProduction( - opts: Eth1Options, - modules: Pick -): IEth1ForBlockProduction { - if (opts.enabled) { - return new Eth1ForBlockProduction(opts, { - config: modules.config, - db: modules.db, - metrics: modules.metrics, - logger: modules.logger, - signal: modules.signal, - }); - } - return new Eth1ForBlockProductionDisabled(); -} - -export class Eth1ForBlockProduction implements IEth1ForBlockProduction { - private readonly eth1DepositDataTracker: Eth1DepositDataTracker | null; - - constructor(opts: Eth1Options, modules: Eth1DepositDataTrackerModules & {eth1Provider?: IEth1Provider}) { - const eth1Provider = - modules.eth1Provider || - new Eth1Provider( - modules.config, - {...opts, logger: modules.logger}, - modules.signal, - modules.metrics?.eth1HttpClient - ); - - this.eth1DepositDataTracker = opts.disableEth1DepositDataTracker - ? null - : new Eth1DepositDataTracker(opts, modules, eth1Provider); - } - - async getEth1DataAndDeposits(state: CachedBeaconStateAllForks): Promise { - if (this.eth1DepositDataTracker === null) { - return {eth1Data: state.eth1Data, deposits: []}; - } - return this.eth1DepositDataTracker.getEth1DataAndDeposits(state); - } - - isPollingEth1Data(): boolean { - return this.eth1DepositDataTracker?.isPollingEth1Data() ?? false; - } - - stopPollingEth1Data(): void { - this.eth1DepositDataTracker?.stopPollingEth1Data(); - } -} - -/** - * Disabled version of Eth1ForBlockProduction - * May produce invalid blocks by not adding new deposits and voting for the same eth1Data - */ -export class Eth1ForBlockProductionDisabled implements IEth1ForBlockProduction { - /** - * Returns same eth1Data as in state and no deposits - * May produce invalid blocks if deposits have to be added - */ - async getEth1DataAndDeposits(state: CachedBeaconStateAllForks): Promise { - return {eth1Data: state.eth1Data, deposits: []}; - } - - isPollingEth1Data(): boolean { - return false; - } - - stopPollingEth1Data(): void { - // Ignore - } -} diff --git a/packages/beacon-node/src/eth1/interface.ts b/packages/beacon-node/src/eth1/interface.ts deleted file mode 100644 index eecec8fc77..0000000000 --- a/packages/beacon-node/src/eth1/interface.ts +++ /dev/null @@ -1,87 +0,0 @@ -import {BeaconConfig} from "@lodestar/config"; -import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; -import {phase0} from "@lodestar/types"; - -export type EthJsonRpcBlockRaw = { - /** the block number. null when its pending block. `"0x1b4"` */ - number: string; - /** 32 Bytes - hash of the block. null when its pending block. `"0xdc0818cf78f21a8e70579cb46a43643f78291264dda342ae31049421c82d21ae"` */ - hash: string; - /** 32 Bytes - hash of the parent block. `"0xe99e022112df268087ea7eafaf4790497fd21dbeeb6bd7a1721df161a6657a54"` */ - parentHash: string; - /** - * integer of the total difficulty of the chain until this block. `"0x78ed983323d"`. - * Current mainnet value is 0x684de10dc5c03f006b6, 75 bits so requires a bigint. - */ - totalDifficulty: string; - /** the unix timestamp for when the block was collated. 
`"0x55ba467c"` */ - timestamp: string; -}; - -export interface IEth1Provider { - deployBlock: number; - getBlockNumber(): Promise; - /** Returns HTTP code 200 + value=null if block is not found */ - getBlockByNumber(blockNumber: number | "latest"): Promise; - /** Returns HTTP code 200 + value=null if block is not found */ - getBlockByHash(blockHashHex: string): Promise; - /** null returns are ignored, may return a different number of blocks than expected */ - getBlocksByNumber(fromBlock: number, toBlock: number): Promise; - getDepositEvents(fromBlock: number, toBlock: number): Promise; - validateContract(): Promise; - getState(): Eth1ProviderState; -} - -export enum Eth1ProviderState { - ONLINE = "ONLINE", - OFFLINE = "OFFLINE", - ERROR = "ERROR", - AUTH_FAILED = "AUTH_FAILED", -} - -export type Eth1DataAndDeposits = { - eth1Data: phase0.Eth1Data; - deposits: phase0.Deposit[]; -}; - -export interface IEth1ForBlockProduction { - getEth1DataAndDeposits(state: CachedBeaconStateAllForks): Promise; - - isPollingEth1Data(): boolean; - - /** - * Should stop polling eth1Data after a Electra block is finalized AND deposit_requests_start_index is reached - */ - stopPollingEth1Data(): void; -} - -/** Different Eth1Block from phase0.Eth1Block with blockHash */ -export type Eth1Block = { - blockHash: Uint8Array; - blockNumber: number; - timestamp: number; -}; - -export type BatchDepositEvents = { - depositEvents: phase0.DepositEvent[]; - blockNumber: number; -}; - -export type Eth1Streamer = { - getDepositsStream(fromBlock: number): AsyncGenerator; - getDepositsAndBlockStreamForGenesis(fromBlock: number): AsyncGenerator<[phase0.DepositEvent[], phase0.Eth1Block]>; -}; - -export type IEth1StreamParams = Pick< - BeaconConfig, - "ETH1_FOLLOW_DISTANCE" | "MIN_GENESIS_TIME" | "GENESIS_DELAY" | "SECONDS_PER_ETH1_BLOCK" -> & { - maxBlocksPerPoll: number; -}; - -export type IJson = string | number | boolean | undefined | IJson[] | {[key: string]: IJson}; - -export interface RpcPayload
<P = IJson[]>
{ - method: string; - params: P; -} diff --git a/packages/beacon-node/src/eth1/options.ts b/packages/beacon-node/src/eth1/options.ts deleted file mode 100644 index 2f9abdd69b..0000000000 --- a/packages/beacon-node/src/eth1/options.ts +++ /dev/null @@ -1,28 +0,0 @@ -export type Eth1Options = { - enabled?: boolean; - disableEth1DepositDataTracker?: boolean; - providerUrls?: string[]; - /** - * jwtSecretHex is the jwt secret if the eth1 modules should ping the jwt auth - * protected engine endpoints. - */ - jwtSecretHex?: string; - jwtId?: string; - jwtVersion?: string; - depositContractDeployBlock?: number; - unsafeAllowDepositDataOverwrite?: boolean; - /** - * Vote for a specific eth1_data regardless of validity and existing votes. - * hex encoded ssz serialized Eth1Data type. - */ - forcedEth1DataVote?: string; -}; - -export const DEFAULT_PROVIDER_URLS = ["http://localhost:8545"]; - -export const defaultEth1Options: Eth1Options = { - enabled: true, - providerUrls: DEFAULT_PROVIDER_URLS, - depositContractDeployBlock: 0, - unsafeAllowDepositDataOverwrite: false, -}; diff --git a/packages/beacon-node/src/eth1/provider/eth1Provider.ts b/packages/beacon-node/src/eth1/provider/eth1Provider.ts deleted file mode 100644 index c24829be1d..0000000000 --- a/packages/beacon-node/src/eth1/provider/eth1Provider.ts +++ /dev/null @@ -1,229 +0,0 @@ -import {ChainConfig} from "@lodestar/config"; -import {Logger} from "@lodestar/logger"; -import {phase0} from "@lodestar/types"; -import { - FetchError, - createElapsedTimeTracker, - fromHex, - isErrorAborted, - isFetchError, - toHex, - toPrintableUrl, -} from "@lodestar/utils"; -import {HTTP_CONNECTION_ERROR_CODES, HTTP_FATAL_ERROR_CODES} from "../../execution/engine/utils.js"; -import {isValidAddress} from "../../util/address.js"; -import {linspace} from "../../util/numpy.js"; -import {Eth1Block, Eth1ProviderState, EthJsonRpcBlockRaw, IEth1Provider} from "../interface.js"; -import {DEFAULT_PROVIDER_URLS, Eth1Options} from "../options.js"; -import {depositEventTopics, parseDepositLog} from "../utils/depositContract.js"; -import { - ErrorJsonRpcResponse, - HttpRpcError, - JsonRpcHttpClient, - JsonRpcHttpClientEvent, - JsonRpcHttpClientMetrics, - ReqOpts, -} from "./jsonRpcHttpClient.js"; -import {dataToBytes, isJsonRpcTruncatedError, numToQuantity, quantityToNum} from "./utils.js"; - -/** - * Binds return types to Ethereum JSON RPC methods - */ -type EthJsonRpcReturnTypes = { - eth_getBlockByNumber: EthJsonRpcBlockRaw | null; - eth_getBlockByHash: EthJsonRpcBlockRaw | null; - eth_blockNumber: string; - eth_getCode: string; - eth_getLogs: { - removed: boolean; - logIndex: string; - transactionIndex: string; - transactionHash: string; - blockHash: string; - blockNumber: string; - address: string; - data: string; - topics: string[]; - }[]; -}; - -// Define static options once to prevent extra allocations -const getBlocksByNumberOpts: ReqOpts = {routeId: "getBlockByNumber_batched"}; -const getBlockByNumberOpts: ReqOpts = {routeId: "getBlockByNumber"}; -const getBlockByHashOpts: ReqOpts = {routeId: "getBlockByHash"}; -const getBlockNumberOpts: ReqOpts = {routeId: "getBlockNumber"}; -const getLogsOpts: ReqOpts = {routeId: "getLogs"}; - -const isOneMinutePassed = createElapsedTimeTracker({minElapsedTime: 60_000}); - -export class Eth1Provider implements IEth1Provider { - readonly deployBlock: number; - private readonly depositContractAddress: string; - private readonly rpc: JsonRpcHttpClient; - // The default state is ONLINE, it will be updated to offline if we receive 
a http error - private state: Eth1ProviderState = Eth1ProviderState.ONLINE; - private logger?: Logger; - - constructor( - config: Pick, - opts: Pick & { - logger?: Logger; - }, - signal?: AbortSignal, - metrics?: JsonRpcHttpClientMetrics | null - ) { - this.logger = opts.logger; - this.deployBlock = opts.depositContractDeployBlock ?? 0; - this.depositContractAddress = toHex(config.DEPOSIT_CONTRACT_ADDRESS); - - const providerUrls = opts.providerUrls ?? DEFAULT_PROVIDER_URLS; - this.rpc = new JsonRpcHttpClient(providerUrls, { - signal, - // Don't fallback with is truncated error. Throw early and let the retry on this class handle it - shouldNotFallback: isJsonRpcTruncatedError, - jwtSecret: opts.jwtSecretHex ? fromHex(opts.jwtSecretHex) : undefined, - jwtId: opts.jwtId, - jwtVersion: opts.jwtVersion, - metrics: metrics, - }); - this.logger?.info("Eth1 provider", {urls: providerUrls.map(toPrintableUrl).toString()}); - - this.rpc.emitter.on(JsonRpcHttpClientEvent.RESPONSE, () => { - const oldState = this.state; - this.state = Eth1ProviderState.ONLINE; - - if (oldState !== Eth1ProviderState.ONLINE) { - this.logger?.info("Eth1 provider is back online", {oldState, newState: this.state}); - } - }); - - this.rpc.emitter.on(JsonRpcHttpClientEvent.ERROR, ({error}) => { - if (isErrorAborted(error)) { - this.state = Eth1ProviderState.ONLINE; - } else if ((error as unknown) instanceof HttpRpcError || (error as unknown) instanceof ErrorJsonRpcResponse) { - this.state = Eth1ProviderState.ERROR; - } else if (error && isFetchError(error) && HTTP_FATAL_ERROR_CODES.includes((error as FetchError).code)) { - this.state = Eth1ProviderState.OFFLINE; - } else if (error && isFetchError(error) && HTTP_CONNECTION_ERROR_CODES.includes((error as FetchError).code)) { - this.state = Eth1ProviderState.AUTH_FAILED; - } - - if (this.state !== Eth1ProviderState.ONLINE && isOneMinutePassed()) { - this.logger?.error( - "Eth1 provider error", - { - state: this.state, - lastErrorAt: new Date(Date.now() - isOneMinutePassed.msSinceLastCall).toLocaleTimeString(), - }, - error - ); - } - }); - } - - getState(): Eth1ProviderState { - return this.state; - } - - async validateContract(): Promise { - if (!isValidAddress(this.depositContractAddress)) { - throw Error(`Invalid contract address: ${this.depositContractAddress}`); - } - - const code = await this.getCode(this.depositContractAddress); - if (!code || code === "0x") { - throw new Error(`There is no deposit contract at given address: ${this.depositContractAddress}`); - } - } - - async getDepositEvents(fromBlock: number, toBlock: number): Promise { - const logsRawArr = await this.getLogs({ - fromBlock, - toBlock, - address: this.depositContractAddress, - topics: depositEventTopics, - }); - return logsRawArr.flat(1).map((log) => parseDepositLog(log)); - } - - /** - * Fetches an arbitrary array of block numbers in batch - */ - async getBlocksByNumber(fromBlock: number, toBlock: number): Promise { - const method = "eth_getBlockByNumber"; - const blocksArr = await this.rpc.fetchBatch( - linspace(fromBlock, toBlock).map((blockNumber) => ({method, params: [numToQuantity(blockNumber), false]})), - getBlocksByNumberOpts - ); - const blocks: EthJsonRpcBlockRaw[] = []; - for (const block of blocksArr.flat(1)) { - if (block) blocks.push(block); - } - return blocks; - } - - async getBlockByNumber(blockNumber: number | "latest"): Promise { - const method = "eth_getBlockByNumber"; - const blockNumberHex = typeof blockNumber === "string" ? 
blockNumber : numToQuantity(blockNumber); - return this.rpc.fetch( - // false = include only transaction roots, not full objects - {method, params: [blockNumberHex, false]}, - getBlockByNumberOpts - ); - } - - async getBlockByHash(blockHashHex: string): Promise { - const method = "eth_getBlockByHash"; - return this.rpc.fetch( - // false = include only transaction roots, not full objects - {method, params: [blockHashHex, false]}, - getBlockByHashOpts - ); - } - - async getBlockNumber(): Promise { - const method = "eth_blockNumber"; - const blockNumberRaw = await this.rpc.fetch( - {method, params: []}, - getBlockNumberOpts - ); - return parseInt(blockNumberRaw, 16); - } - - async getCode(address: string): Promise { - const method = "eth_getCode"; - return this.rpc.fetch({method, params: [address, "latest"]}); - } - - async getLogs(options: { - fromBlock: number; - toBlock: number; - address: string; - topics: string[]; - }): Promise<{blockNumber: number; data: string; topics: string[]}[]> { - const method = "eth_getLogs"; - const hexOptions = { - ...options, - fromBlock: numToQuantity(options.fromBlock), - toBlock: numToQuantity(options.toBlock), - }; - const logsRaw = await this.rpc.fetch( - {method, params: [hexOptions]}, - getLogsOpts - ); - return logsRaw.map((logRaw) => ({ - blockNumber: parseInt(logRaw.blockNumber, 16), - data: logRaw.data, - topics: logRaw.topics, - })); - } -} - -export function parseEth1Block(blockRaw: EthJsonRpcBlockRaw): Eth1Block { - if (typeof blockRaw !== "object") throw Error("block is not an object"); - return { - blockHash: dataToBytes(blockRaw.hash, 32), - blockNumber: quantityToNum(blockRaw.number, "block.number"), - timestamp: quantityToNum(blockRaw.timestamp, "block.timestamp"), - }; -} diff --git a/packages/beacon-node/src/eth1/provider/utils.ts b/packages/beacon-node/src/eth1/provider/utils.ts deleted file mode 100644 index 9b3c88c2e8..0000000000 --- a/packages/beacon-node/src/eth1/provider/utils.ts +++ /dev/null @@ -1,136 +0,0 @@ -import {RootHex} from "@lodestar/types"; -import {bigIntToBytes, bytesToBigInt, fromHex, fromHexInto, toHex} from "@lodestar/utils"; -import {ErrorParseJson} from "./jsonRpcHttpClient.js"; - -/** QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API */ -export type QUANTITY = string; -/** DATA as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API */ -export type DATA = string; - -export const rootHexRegex = /^0x[a-fA-F0-9]{64}$/; - -export function numberToHex(n: number | bigint): string { - return "0x" + n.toString(16); -} - -export function isJsonRpcTruncatedError(error: Error): boolean { - return ( - // Truncated responses usually get as 200 but since it's truncated the JSON will be invalid - error instanceof ErrorParseJson || - // Otherwise guess Infura error message of too many events - (error instanceof Error && error.message.includes("query returned more than 10000 results")) || - // Nethermind enforces limits on JSON RPC batch calls - (error instanceof Error && error.message.toLowerCase().includes("batch size limit exceeded")) - ); -} - -export function bytesToHex(bytes: Uint8Array): string { - // Handle special case in Ethereum hex formating where hex values may include a single letter - // 0x0, 0x1 are valid values - if (bytes.length === 1 && bytes[0] <= 0xf) { - return "0x" + bytes[0].toString(16); - } - - return toHex(bytes); -} - -/** - * QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API - * - * When encoding QUANTITIES 
(integers, numbers): encode as hex, prefix with “0x”, the most compact representation (slight exception: zero should be represented as “0x0”). Examples: - * - 0x41 (65 in decimal) - * - 0x400 (1024 in decimal) - * - WRONG: 0x (should always have at least one digit - zero is “0x0”) - * - WRONG: 0x0400 (no leading zeroes allowed) - * - WRONG: ff (must be prefixed 0x) - */ -export function numToQuantity(num: number | bigint): QUANTITY { - return "0x" + num.toString(16); -} - -/** - * QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API - */ -export function quantityToNum(hex: QUANTITY, id = ""): number { - const num = parseInt(hex, 16); - if (Number.isNaN(num) || num < 0) throw Error(`Invalid hex decimal ${id} '${hex}'`); - return num; -} - -/** - * QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API. - * Typesafe fn to convert hex string to bigint. The BigInt constructor param is any - */ -export function quantityToBigint(hex: QUANTITY, id = ""): bigint { - try { - return BigInt(hex); - } catch (e) { - throw Error(`Invalid hex bigint ${id} '${hex}': ${(e as Error).message}`); - } -} - -/** - * QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API. - */ -export function quantityToBytes(hex: QUANTITY): Uint8Array { - const bn = quantityToBigint(hex); - return bigIntToBytes(bn, 32, "le"); -} - -/** - * QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API. - * Compress a 32 ByteVector into a QUANTITY - */ -export function bytesToQuantity(bytes: Uint8Array): QUANTITY { - const bn = bytesToBigInt(bytes, "le"); - return numToQuantity(bn); -} - -/** - * DATA as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API - * - * When encoding UNFORMATTED DATA (byte arrays, account addresses, hashes, bytecode arrays): encode as hex, prefix with - * “0x”, two hex digits per byte. 
Examples: - * - * - 0x41 (size 1, “A”) - * - 0x004200 (size 3, “\0B\0”) - * - 0x (size 0, “”) - * - WRONG: 0xf0f0f (must be even number of digits) - * - WRONG: 004200 (must be prefixed 0x) - */ -export function bytesToData(bytes: Uint8Array): DATA { - return toHex(bytes); -} - -/** - * DATA as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API - */ -export function dataToBytes(hex: DATA, fixedLength: number | null): Uint8Array { - try { - const bytes = fromHex(hex); - if (fixedLength != null && bytes.length !== fixedLength) { - throw Error(`Wrong data length ${bytes.length} expected ${fixedLength}`); - } - return bytes; - } catch (e) { - (e as Error).message = `Invalid hex string: ${(e as Error).message}`; - throw e; - } -} - -/** - * Convert DATA into a preallocated buffer - * fromHexInto will throw if buffer's length is not the same as the decoded hex length - */ -export function dataIntoBytes(hex: DATA, buffer: Uint8Array): Uint8Array { - fromHexInto(hex, buffer); - return buffer; -} - -/** - * DATA as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API - */ -export function dataToRootHex(hex: DATA, id = ""): RootHex { - if (!rootHexRegex.test(hex)) throw Error(`Invalid hex root ${id} '${hex}'`); - return hex; -} diff --git a/packages/beacon-node/src/eth1/stream.ts b/packages/beacon-node/src/eth1/stream.ts deleted file mode 100644 index f4adaf2829..0000000000 --- a/packages/beacon-node/src/eth1/stream.ts +++ /dev/null @@ -1,75 +0,0 @@ -import {phase0} from "@lodestar/types"; -import {sleep} from "@lodestar/utils"; -import {BatchDepositEvents, Eth1Block, IEth1Provider, IEth1StreamParams} from "./interface.js"; -import {parseEth1Block} from "./provider/eth1Provider.js"; -import {groupDepositEventsByBlock} from "./utils/groupDepositEventsByBlock.js"; -import {optimizeNextBlockDiffForGenesis} from "./utils/optimizeNextBlockDiffForGenesis.js"; - -/** - * Phase 1 of genesis building. - * Not enough validators, only stream deposits - * @param signal Abort stream returning after a while loop cycle. Aborts internal sleep - */ -export async function* getDepositsStream( - fromBlock: number, - provider: IEth1Provider, - params: IEth1StreamParams, - signal?: AbortSignal -): AsyncGenerator { - fromBlock = Math.max(fromBlock, provider.deployBlock); - - while (true) { - const remoteFollowBlock = await getRemoteFollowBlock(provider, params); - const toBlock = Math.min(remoteFollowBlock, fromBlock + params.maxBlocksPerPoll); - const logs = await provider.getDepositEvents(fromBlock, toBlock); - for (const batchedDeposits of groupDepositEventsByBlock(logs)) { - yield batchedDeposits; - } - - fromBlock = toBlock; - - // If reached head, sleep for an eth1 block. Throws if signal is aborted - await sleep(toBlock >= remoteFollowBlock ? params.SECONDS_PER_ETH1_BLOCK * 1000 : 10, signal); - } -} - -/** - * Phase 2 of genesis building. - * There are enough validators, stream deposits and blocks - * @param signal Abort stream returning after a while loop cycle. 
Aborts internal sleep - */ -export async function* getDepositsAndBlockStreamForGenesis( - fromBlock: number, - provider: IEth1Provider, - params: IEth1StreamParams, - signal?: AbortSignal -): AsyncGenerator<[phase0.DepositEvent[], Eth1Block]> { - fromBlock = Math.max(fromBlock, provider.deployBlock); - fromBlock = Math.min(fromBlock, await getRemoteFollowBlock(provider, params)); - let toBlock = fromBlock; // First, fetch only the first block - - while (true) { - const [logs, blockRaw] = await Promise.all([ - provider.getDepositEvents(fromBlock, toBlock), - provider.getBlockByNumber(toBlock), - ]); - - if (!blockRaw) throw Error(`No block found for number ${toBlock}`); - const block = parseEth1Block(blockRaw); - - yield [logs, block]; - - const remoteFollowBlock = await getRemoteFollowBlock(provider, params); - const nextBlockDiff = optimizeNextBlockDiffForGenesis(block, params); - fromBlock = toBlock; - toBlock = Math.min(remoteFollowBlock, fromBlock + Math.min(nextBlockDiff, params.maxBlocksPerPoll)); - - // If reached head, sleep for an eth1 block. Throws if signal is aborted - await sleep(toBlock >= remoteFollowBlock ? params.SECONDS_PER_ETH1_BLOCK * 1000 : 10, signal); - } -} - -async function getRemoteFollowBlock(provider: IEth1Provider, params: IEth1StreamParams): Promise { - const remoteHighestBlock = await provider.getBlockNumber(); - return Math.max(remoteHighestBlock - params.ETH1_FOLLOW_DISTANCE, 0); -} diff --git a/packages/beacon-node/src/eth1/utils/depositContract.ts b/packages/beacon-node/src/eth1/utils/depositContract.ts deleted file mode 100644 index b576a3d5f6..0000000000 --- a/packages/beacon-node/src/eth1/utils/depositContract.ts +++ /dev/null @@ -1,37 +0,0 @@ -import {Interface} from "@ethersproject/abi"; -import {phase0, ssz} from "@lodestar/types"; -import {fromHex} from "@lodestar/utils"; - -const depositEventFragment = - "event DepositEvent(bytes pubkey, bytes withdrawal_credentials, bytes amount, bytes signature, bytes index)"; - -const depositContractInterface = new Interface([depositEventFragment]); - -/** - * Precomputed topics of DepositEvent logs - */ -export const depositEventTopics = [depositContractInterface.getEventTopic("DepositEvent")]; - -/** - * Parse DepositEvent log - */ -export function parseDepositLog(log: {blockNumber: number; data: string; topics: string[]}): phase0.DepositEvent { - const event = depositContractInterface.parseLog(log); - const values = event.args; - if (values === undefined) throw Error(`DepositEvent at ${log.blockNumber} has no values`); - return { - blockNumber: log.blockNumber, - index: parseHexNumLittleEndian(values.index), - depositData: { - pubkey: fromHex(values.pubkey), - withdrawalCredentials: fromHex(values.withdrawal_credentials), - amount: parseHexNumLittleEndian(values.amount), - signature: fromHex(values.signature), - }, - }; -} - -function parseHexNumLittleEndian(hex: string): number { - // Can't use parseInt() because amount is a hex string in little endian - return ssz.UintNum64.deserialize(fromHex(hex)); -} diff --git a/packages/beacon-node/src/eth1/utils/deposits.ts b/packages/beacon-node/src/eth1/utils/deposits.ts deleted file mode 100644 index 470fab634f..0000000000 --- a/packages/beacon-node/src/eth1/utils/deposits.ts +++ /dev/null @@ -1,70 +0,0 @@ -import {Tree, toGindex} from "@chainsafe/persistent-merkle-tree"; -import {FilterOptions} from "@lodestar/db"; -import {CachedBeaconStateAllForks, getEth1DepositCount} from "@lodestar/state-transition"; -import {phase0, ssz} from "@lodestar/types"; -import 
{toRootHex} from "@lodestar/utils"; -import {DepositTree} from "../../db/repositories/depositDataRoot.js"; -import {Eth1Error, Eth1ErrorCode} from "../errors.js"; - -export type DepositGetter = (indexRange: FilterOptions, eth1Data: phase0.Eth1Data) => Promise; - -export async function getDeposits( - // eth1_deposit_index represents the next deposit index to be added - state: CachedBeaconStateAllForks, - eth1Data: phase0.Eth1Data, - depositsGetter: DepositGetter -): Promise { - const depositIndex = state.eth1DepositIndex; - const depositCount = eth1Data.depositCount; - - if (depositIndex > depositCount) { - throw new Eth1Error({code: Eth1ErrorCode.DEPOSIT_INDEX_TOO_HIGH, depositIndex, depositCount}); - } - - const depositsLen = getEth1DepositCount(state, eth1Data); - - if (depositsLen === 0) { - return []; // If depositsLen === 0, we can return early since no deposit with be returned from depositsGetter - } - - const indexRange = {gte: depositIndex, lt: depositIndex + depositsLen}; - const deposits = await depositsGetter(indexRange, eth1Data); - - if (deposits.length < depositsLen) { - throw new Eth1Error({code: Eth1ErrorCode.NOT_ENOUGH_DEPOSITS, len: deposits.length, expectedLen: depositsLen}); - } - - if (deposits.length > depositsLen) { - throw new Eth1Error({code: Eth1ErrorCode.TOO_MANY_DEPOSITS, len: deposits.length, expectedLen: depositsLen}); - } - - return deposits; -} - -export function getDepositsWithProofs( - depositEvents: phase0.DepositEvent[], - depositRootTree: DepositTree, - eth1Data: phase0.Eth1Data -): phase0.Deposit[] { - // Get tree at this particular depositCount to compute correct proofs - const viewAtDepositCount = depositRootTree.sliceTo(eth1Data.depositCount - 1); - - const depositRoot = viewAtDepositCount.hashTreeRoot(); - - if (!ssz.Root.equals(depositRoot, eth1Data.depositRoot)) { - throw new Eth1Error({ - code: Eth1ErrorCode.WRONG_DEPOSIT_ROOT, - root: toRootHex(depositRoot), - expectedRoot: toRootHex(eth1Data.depositRoot), - }); - } - - // Already commited for .hashTreeRoot() - const treeAtDepositCount = new Tree(viewAtDepositCount.node); - const depositTreeDepth = viewAtDepositCount.type.depth; - - return depositEvents.map((log) => ({ - proof: treeAtDepositCount.getSingleProof(toGindex(depositTreeDepth, BigInt(log.index))), - data: log.depositData, - })); -} diff --git a/packages/beacon-node/src/eth1/utils/eth1Data.ts b/packages/beacon-node/src/eth1/utils/eth1Data.ts deleted file mode 100644 index fd83e72434..0000000000 --- a/packages/beacon-node/src/eth1/utils/eth1Data.ts +++ /dev/null @@ -1,100 +0,0 @@ -import {Root, phase0} from "@lodestar/types"; -import {DepositTree} from "../../db/repositories/depositDataRoot.js"; -import {binarySearchLte} from "../../util/binarySearch.js"; -import {Eth1Error, Eth1ErrorCode} from "../errors.js"; -import {Eth1Block} from "../interface.js"; - -type BlockNumber = number; - -/** - * Appends partial eth1 data (depositRoot, depositCount) in a sequence of blocks - * eth1 data deposit is inferred from sparse eth1 data obtained from the deposit logs - */ -export async function getEth1DataForBlocks( - blocks: Eth1Block[], - depositDescendingStream: AsyncIterable, - depositRootTree: DepositTree, - lastProcessedDepositBlockNumber: BlockNumber | null -): Promise<(phase0.Eth1Data & Eth1Block)[]> { - // Exclude blocks for which there is no valid eth1 data deposit - if (lastProcessedDepositBlockNumber !== null) { - blocks = blocks.filter((block) => block.blockNumber <= lastProcessedDepositBlockNumber); - } - - // A valid block can be 
constructed using previous `state.eth1Data`, don't throw - if (blocks.length === 0) { - return []; - } - - // Collect the latest deposit of each blockNumber in a block number range - const fromBlock = blocks[0].blockNumber; - const toBlock = blocks.at(-1)?.blockNumber as number; - const depositsByBlockNumber = await getDepositsByBlockNumber(fromBlock, toBlock, depositDescendingStream); - if (depositsByBlockNumber.length === 0) { - throw new Eth1Error({code: Eth1ErrorCode.NO_DEPOSITS_FOR_BLOCK_RANGE, fromBlock, toBlock}); - } - - // Precompute a map of depositCount => depositRoot (from depositRootTree) - const depositCounts = depositsByBlockNumber.map((event) => event.index + 1); - const depositRootByDepositCount = getDepositRootByDepositCount(depositCounts, depositRootTree); - - const eth1Datas: (phase0.Eth1Data & Eth1Block)[] = []; - for (const block of blocks) { - const deposit = binarySearchLte(depositsByBlockNumber, block.blockNumber, (event) => event.blockNumber); - const depositCount = deposit.index + 1; - const depositRoot = depositRootByDepositCount.get(depositCount); - if (depositRoot === undefined) { - throw new Eth1Error({code: Eth1ErrorCode.NO_DEPOSIT_ROOT, depositCount}); - } - eth1Datas.push({...block, depositCount, depositRoot}); - } - return eth1Datas; -} - -/** - * Collect depositCount by blockNumber from a stream matching a block number range - * For a given blockNumber it's depositCount is equal to the index + 1 of the - * closest deposit event whose deposit.blockNumber <= blockNumber - * @returns array ascending by blockNumber - */ -export async function getDepositsByBlockNumber( - fromBlock: BlockNumber, - toBlock: BlockNumber, - depositEventDescendingStream: AsyncIterable -): Promise { - const depositCountMap = new Map(); - // Take blocks until the block under the range lower bound (included) - for await (const deposit of depositEventDescendingStream) { - if (deposit.blockNumber <= toBlock && !depositCountMap.has(deposit.blockNumber)) { - depositCountMap.set(deposit.blockNumber, deposit); - } - if (deposit.blockNumber < fromBlock) { - break; - } - } - - return Array.from(depositCountMap.values()).sort((a, b) => a.blockNumber - b.blockNumber); -} - -/** - * Precompute a map of depositCount => depositRoot from a depositRootTree filled beforehand - */ -export function getDepositRootByDepositCount(depositCounts: number[], depositRootTree: DepositTree): Map { - // Unique + sort numerically in descending order - depositCounts = [...new Set(depositCounts)].sort((a, b) => b - a); - - if (depositCounts.length > 0) { - const maxIndex = depositCounts[0] - 1; - const treeLength = depositRootTree.length - 1; - if (maxIndex > treeLength) { - throw new Eth1Error({code: Eth1ErrorCode.NOT_ENOUGH_DEPOSIT_ROOTS, index: maxIndex, treeLength}); - } - } - - const depositRootByDepositCount = new Map(); - for (const depositCount of depositCounts) { - depositRootTree = depositRootTree.sliceTo(depositCount - 1); - depositRootByDepositCount.set(depositCount, depositRootTree.hashTreeRoot()); - } - return depositRootByDepositCount; -} diff --git a/packages/beacon-node/src/eth1/utils/eth1DepositEvent.ts b/packages/beacon-node/src/eth1/utils/eth1DepositEvent.ts deleted file mode 100644 index cd212831fc..0000000000 --- a/packages/beacon-node/src/eth1/utils/eth1DepositEvent.ts +++ /dev/null @@ -1,12 +0,0 @@ -/** - * Assert that an array of deposits are consecutive and ascending - */ -export function assertConsecutiveDeposits(depositEvents: {index: number}[]): void { - for (let i = 0; i < 
depositEvents.length - 1; i++) { - const indexLeft = depositEvents[i].index; - const indexRight = depositEvents[i + 1].index; - if (indexLeft !== indexRight - 1) { - throw Error(`Non consecutive deposits. deposit[${i}] = ${indexLeft}, deposit[${i + 1}] ${indexRight}`); - } - } -} diff --git a/packages/beacon-node/src/eth1/utils/eth1Vote.ts b/packages/beacon-node/src/eth1/utils/eth1Vote.ts deleted file mode 100644 index a101907b49..0000000000 --- a/packages/beacon-node/src/eth1/utils/eth1Vote.ts +++ /dev/null @@ -1,142 +0,0 @@ -import {ChainForkConfig} from "@lodestar/config"; -import {EPOCHS_PER_ETH1_VOTING_PERIOD, SLOTS_PER_EPOCH, isForkPostElectra} from "@lodestar/params"; -import {BeaconStateAllForks, BeaconStateElectra, computeTimeAtSlot} from "@lodestar/state-transition"; -import {RootHex, phase0} from "@lodestar/types"; -import {toRootHex} from "@lodestar/utils"; - -export type Eth1DataGetter = ({ - timestampRange, -}: { - timestampRange: {gte: number; lte: number}; -}) => Promise; - -export async function getEth1VotesToConsider( - config: ChainForkConfig, - state: BeaconStateAllForks, - eth1DataGetter: Eth1DataGetter -): Promise { - const fork = config.getForkName(state.slot); - if (isForkPostElectra(fork)) { - const {eth1DepositIndex, depositRequestsStartIndex} = state as BeaconStateElectra; - if (eth1DepositIndex === Number(depositRequestsStartIndex)) { - return state.eth1DataVotes.getAllReadonly(); - } - } - - const periodStart = votingPeriodStartTime(config, state); - const {SECONDS_PER_ETH1_BLOCK, ETH1_FOLLOW_DISTANCE} = config; - - // Modified version of the spec function to fetch the required range directly from the DB - return ( - await eth1DataGetter({ - timestampRange: { - // Spec v0.12.2 - // is_candidate_block = - // block.timestamp + SECONDS_PER_ETH1_BLOCK * ETH1_FOLLOW_DISTANCE <= period_start && - // block.timestamp + SECONDS_PER_ETH1_BLOCK * ETH1_FOLLOW_DISTANCE * 2 >= period_start - lte: periodStart - SECONDS_PER_ETH1_BLOCK * ETH1_FOLLOW_DISTANCE, - gte: periodStart - SECONDS_PER_ETH1_BLOCK * ETH1_FOLLOW_DISTANCE * 2, - }, - }) - ).filter((eth1Data) => eth1Data.depositCount >= state.eth1Data.depositCount); -} - -export function pickEth1Vote(state: BeaconStateAllForks, votesToConsider: phase0.Eth1Data[]): phase0.Eth1Data { - const votesToConsiderKeys = new Set(); - for (const eth1Data of votesToConsider) { - votesToConsiderKeys.add(getEth1DataKey(eth1Data)); - } - - const eth1DataHashToEth1Data = new Map(); - const eth1DataVoteCountByRoot = new Map(); - const eth1DataVotesOrder: RootHex[] = []; - - // BeaconStateAllForks is always represented as a tree with a hashing cache. - // To check equality its cheaper to use hashTreeRoot as keys. - // However `votesToConsider` is an array of values since those are read from DB. - // TODO: Optimize cache of known votes, to prevent re-hashing stored values. - // Note: for low validator counts it's not very important, since this runs once per proposal - const eth1DataVotes = state.eth1DataVotes.getAllReadonly(); - for (const eth1DataVote of eth1DataVotes) { - const rootHex = getEth1DataKey(eth1DataVote); - - if (votesToConsiderKeys.has(rootHex)) { - const prevVoteCount = eth1DataVoteCountByRoot.get(rootHex); - eth1DataVoteCountByRoot.set(rootHex, 1 + (prevVoteCount ?? 
0)); - - // Cache eth1DataVote to root Map only once per root - if (prevVoteCount === undefined) { - eth1DataHashToEth1Data.set(rootHex, eth1DataVote); - eth1DataVotesOrder.push(rootHex); - } - } - } - - const eth1DataRootsMaxVotes = getKeysWithMaxValue(eth1DataVoteCountByRoot); - - // No votes, vote for the last valid vote - if (eth1DataRootsMaxVotes.length === 0) { - return votesToConsider.at(-1) ?? state.eth1Data; - } - - // If there's a single winning vote with a majority vote that one - if (eth1DataRootsMaxVotes.length === 1) { - return eth1DataHashToEth1Data.get(eth1DataRootsMaxVotes[0]) ?? state.eth1Data; - } - - // If there are multiple winning votes, vote for the latest one - const latestMostVotedRoot = - eth1DataVotesOrder[Math.max(...eth1DataRootsMaxVotes.map((root) => eth1DataVotesOrder.indexOf(root)))]; - return eth1DataHashToEth1Data.get(latestMostVotedRoot) ?? state.eth1Data; -} - -/** - * Returns the array of keys with max value. May return 0, 1 or more keys - */ -function getKeysWithMaxValue(map: Map): T[] { - const entries = Array.from(map.entries()); - let keysMax: T[] = []; - let valueMax = -Infinity; - - for (const [key, value] of entries) { - if (value > valueMax) { - keysMax = [key]; - valueMax = value; - } else if (value === valueMax) { - keysMax.push(key); - } - } - - return keysMax; -} - -/** - * Key-ed by fastSerializeEth1Data(). votesToConsider is read from DB as struct and always has a length of 2048. - * `state.eth1DataVotes` has a length between 0 and ETH1_FOLLOW_DISTANCE with an equal probability of each value. - * So to get the average faster time to key both votesToConsider and state.eth1DataVotes it's better to use - * fastSerializeEth1Data(). However, a long term solution is to cache valid votes in memory and prevent having - * to recompute their key on every proposal. - * - * With `fastSerializeEth1Data()`: avg time 20 ms/op - * ✓ pickEth1Vote - no votes 233.0587 ops/s 4.290764 ms/op - 121 runs 1.02 s - * ✓ pickEth1Vote - max votes 29.21546 ops/s 34.22845 ms/op - 25 runs 1.38 s - * - * With `toHexString(ssz.phase0.Eth1Data.hashTreeRoot(eth1Data))`: avg time 23 ms/op - * ✓ pickEth1Vote - no votes 46.12341 ops/s 21.68096 ms/op - 133 runs 3.40 s - * ✓ pickEth1Vote - max votes 37.89912 ops/s 26.38583 ms/op - 29 runs 1.27 s - */ -function getEth1DataKey(eth1Data: phase0.Eth1Data): string { - return fastSerializeEth1Data(eth1Data); -} - -/** - * Serialize eth1Data types to a unique string ID. It is only used for comparison. 
- */ -export function fastSerializeEth1Data(eth1Data: phase0.Eth1Data): string { - return toRootHex(eth1Data.blockHash) + eth1Data.depositCount.toString(16) + toRootHex(eth1Data.depositRoot); -} - -export function votingPeriodStartTime(config: ChainForkConfig, state: BeaconStateAllForks): number { - const eth1VotingPeriodStartSlot = state.slot - (state.slot % (EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH)); - return computeTimeAtSlot(config, eth1VotingPeriodStartSlot, state.genesisTime); -} diff --git a/packages/beacon-node/src/eth1/utils/groupDepositEventsByBlock.ts b/packages/beacon-node/src/eth1/utils/groupDepositEventsByBlock.ts deleted file mode 100644 index bee8ae080c..0000000000 --- a/packages/beacon-node/src/eth1/utils/groupDepositEventsByBlock.ts +++ /dev/null @@ -1,19 +0,0 @@ -import {phase0} from "@lodestar/types"; -import {BatchDepositEvents} from "../interface.js"; - -/** - * Return deposit events of blocks grouped/sorted by block number and deposit index - * Blocks without events are omitted - * @param depositEvents range deposit events - */ -export function groupDepositEventsByBlock(depositEvents: phase0.DepositEvent[]): BatchDepositEvents[] { - depositEvents.sort((event1, event2) => event1.index - event2.index); - const depositsByBlockMap = new Map(); - for (const deposit of depositEvents) { - depositsByBlockMap.set(deposit.blockNumber, [...(depositsByBlockMap.get(deposit.blockNumber) || []), deposit]); - } - return Array.from(depositsByBlockMap.entries()).map(([blockNumber, depositEvents]) => ({ - blockNumber, - depositEvents, - })); -} diff --git a/packages/beacon-node/src/eth1/utils/optimizeNextBlockDiffForGenesis.ts b/packages/beacon-node/src/eth1/utils/optimizeNextBlockDiffForGenesis.ts deleted file mode 100644 index 961d58680e..0000000000 --- a/packages/beacon-node/src/eth1/utils/optimizeNextBlockDiffForGenesis.ts +++ /dev/null @@ -1,18 +0,0 @@ -import {ChainConfig} from "@lodestar/config"; - -/** - * Utility for fetching genesis min genesis time block - * Returns an approximation of the next block diff to fetch to progressively - * get closer to the block that satisfies min genesis time condition - */ -export function optimizeNextBlockDiffForGenesis( - lastFetchedBlock: {timestamp: number}, - params: Pick -): number { - const timeToGenesis = params.MIN_GENESIS_TIME - params.GENESIS_DELAY - lastFetchedBlock.timestamp; - const numBlocksToGenesis = Math.floor(timeToGenesis / params.SECONDS_PER_ETH1_BLOCK); - if (numBlocksToGenesis <= 2) { - return 1; - } - return Math.max(1, Math.floor(numBlocksToGenesis / 2)); -} diff --git a/packages/beacon-node/src/execution/engine/http.ts b/packages/beacon-node/src/execution/engine/http.ts index 32739eba4c..0885268606 100644 --- a/packages/beacon-node/src/execution/engine/http.ts +++ b/packages/beacon-node/src/execution/engine/http.ts @@ -4,14 +4,6 @@ import {BlobsBundle, ExecutionPayload, ExecutionRequests, Root, RootHex, Wei} fr import {BlobAndProof} from "@lodestar/types/deneb"; import {BlobAndProofV2} from "@lodestar/types/fulu"; import {strip0xPrefix} from "@lodestar/utils"; -import { - ErrorJsonRpcResponse, - HttpRpcError, - IJsonRpcHttpClient, - JsonRpcHttpClientEvent, - ReqOpts, -} from "../../eth1/provider/jsonRpcHttpClient.js"; -import {bytesToData, numToQuantity} from "../../eth1/provider/utils.js"; import {Metrics} from "../../metrics/index.js"; import {EPOCHS_PER_BATCH} from "../../sync/constants.js"; import {getLodestarClientVersion} from "../../util/metadata.js"; @@ -27,6 +19,13 @@ import { PayloadId, 
VersionedHashes, } from "./interface.js"; +import { + ErrorJsonRpcResponse, + HttpRpcError, + IJsonRpcHttpClient, + JsonRpcHttpClientEvent, + ReqOpts, +} from "./jsonRpcHttpClient.js"; import {PayloadIdCache} from "./payloadIdCache.js"; import { BLOB_AND_PROOF_V2_RPC_BYTES, @@ -45,7 +44,7 @@ import { serializePayloadAttributes, serializeVersionedHashes, } from "./types.js"; -import {getExecutionEngineState} from "./utils.js"; +import {bytesToData, getExecutionEngineState, numToQuantity} from "./utils.js"; export type ExecutionEngineModules = { signal: AbortSignal; diff --git a/packages/beacon-node/src/execution/engine/index.ts b/packages/beacon-node/src/execution/engine/index.ts index d339276e2c..b67c7c57d4 100644 --- a/packages/beacon-node/src/execution/engine/index.ts +++ b/packages/beacon-node/src/execution/engine/index.ts @@ -1,5 +1,4 @@ import {fromHex, toPrintableUrl} from "@lodestar/utils"; -import {JsonRpcHttpClient} from "../../eth1/provider/jsonRpcHttpClient.js"; import {ExecutionEngineDisabled} from "./disabled.js"; import { ExecutionEngineHttp, @@ -8,6 +7,7 @@ import { defaultExecutionEngineHttpOpts, } from "./http.js"; import {IExecutionEngine} from "./interface.js"; +import {JsonRpcHttpClient} from "./jsonRpcHttpClient.js"; import {ExecutionEngineMockBackend, ExecutionEngineMockOpts} from "./mock.js"; import {ExecutionEngineMockJsonRpcClient, JsonRpcBackend} from "./utils.js"; diff --git a/packages/beacon-node/src/execution/engine/interface.ts b/packages/beacon-node/src/execution/engine/interface.ts index 897a790be9..eb9553becf 100644 --- a/packages/beacon-node/src/execution/engine/interface.ts +++ b/packages/beacon-node/src/execution/engine/interface.ts @@ -9,9 +9,9 @@ import { import {BlobsBundle, ExecutionPayload, ExecutionRequests, Root, RootHex, Wei, capella} from "@lodestar/types"; import {BlobAndProof} from "@lodestar/types/deneb"; import {BlobAndProofV2} from "@lodestar/types/fulu"; -import {DATA} from "../../eth1/provider/utils.js"; import {PayloadId, PayloadIdCache, WithdrawalV1} from "./payloadIdCache.js"; import {ExecutionPayloadBody} from "./types.js"; +import {DATA} from "./utils.js"; export {PayloadIdCache, type PayloadId, type WithdrawalV1}; diff --git a/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts b/packages/beacon-node/src/execution/engine/jsonRpcHttpClient.ts similarity index 99% rename from packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts rename to packages/beacon-node/src/execution/engine/jsonRpcHttpClient.ts index 9c7807a413..5d852ee690 100644 --- a/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts +++ b/packages/beacon-node/src/execution/engine/jsonRpcHttpClient.ts @@ -1,8 +1,8 @@ import {EventEmitter} from "node:events"; import {StrictEventEmitter} from "strict-event-emitter-types"; import {ErrorAborted, Gauge, Histogram, TimeoutError, fetch, isValidHttpUrl, retry} from "@lodestar/utils"; -import {IJson, RpcPayload} from "../interface.js"; import {JwtClaim, encodeJwtToken} from "./jwt.js"; +import {IJson, RpcPayload} from "./utils.js"; export enum JsonRpcHttpClientEvent { /** diff --git a/packages/beacon-node/src/eth1/provider/jwt.ts b/packages/beacon-node/src/execution/engine/jwt.ts similarity index 100% rename from packages/beacon-node/src/eth1/provider/jwt.ts rename to packages/beacon-node/src/execution/engine/jwt.ts diff --git a/packages/beacon-node/src/execution/engine/mock.ts b/packages/beacon-node/src/execution/engine/mock.ts index b6d30dece5..f57fa548e8 100644 --- 
a/packages/beacon-node/src/execution/engine/mock.ts +++ b/packages/beacon-node/src/execution/engine/mock.ts @@ -11,7 +11,6 @@ import { import {ExecutionPayload, RootHex, bellatrix, deneb, ssz} from "@lodestar/types"; import {fromHex, toRootHex} from "@lodestar/utils"; import {ZERO_HASH_HEX} from "../../constants/index.js"; -import {quantityToNum} from "../../eth1/provider/utils.js"; import {INTEROP_BLOCK_HASH} from "../../node/utils/interop/state.js"; import {kzgCommitmentToVersionedHash} from "../../util/blobs.js"; import {kzg} from "../../util/kzg.js"; @@ -29,7 +28,7 @@ import { serializeExecutionPayload, serializeExecutionRequests, } from "./types.js"; -import {JsonRpcBackend} from "./utils.js"; +import {JsonRpcBackend, quantityToNum} from "./utils.js"; const INTEROP_GAS_LIMIT = 30e6; const PRUNE_PAYLOAD_ID_AFTER_MS = 5000; diff --git a/packages/beacon-node/src/execution/engine/payloadIdCache.ts b/packages/beacon-node/src/execution/engine/payloadIdCache.ts index cf0ff3e18b..56ce1d6ab7 100644 --- a/packages/beacon-node/src/execution/engine/payloadIdCache.ts +++ b/packages/beacon-node/src/execution/engine/payloadIdCache.ts @@ -1,7 +1,7 @@ import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {pruneSetToMax} from "@lodestar/utils"; -import {DATA, QUANTITY} from "../../eth1/provider/utils.js"; import {PayloadAttributesRpc} from "./types.js"; +import {DATA, QUANTITY} from "./utils.js"; // Idealy this only need to be set to the max head reorgs number const MAX_PAYLOAD_IDS = SLOTS_PER_EPOCH; diff --git a/packages/beacon-node/src/execution/engine/types.ts b/packages/beacon-node/src/execution/engine/types.ts index d1da8e7d67..cfc910d8d2 100644 --- a/packages/beacon-node/src/execution/engine/types.ts +++ b/packages/beacon-node/src/execution/engine/types.ts @@ -23,6 +23,14 @@ import { } from "@lodestar/types"; import {BlobAndProof} from "@lodestar/types/deneb"; import {BlobAndProofV2} from "@lodestar/types/fulu"; +import { + ExecutionPayloadStatus, + ExecutionRequestType, + PayloadAttributes, + VersionedHashes, + isExecutionRequestType, +} from "./interface.js"; +import {WithdrawalV1} from "./payloadIdCache.js"; import { DATA, QUANTITY, @@ -32,15 +40,7 @@ import { numToQuantity, quantityToBigint, quantityToNum, -} from "../../eth1/provider/utils.js"; -import { - ExecutionPayloadStatus, - ExecutionRequestType, - PayloadAttributes, - VersionedHashes, - isExecutionRequestType, -} from "./interface.js"; -import {WithdrawalV1} from "./payloadIdCache.js"; +} from "./utils.js"; export type EngineApiRpcParamTypes = { /** diff --git a/packages/beacon-node/src/execution/engine/utils.ts b/packages/beacon-node/src/execution/engine/utils.ts index 8c4a3e81e5..cae8be9884 100644 --- a/packages/beacon-node/src/execution/engine/utils.ts +++ b/packages/beacon-node/src/execution/engine/utils.ts @@ -1,14 +1,120 @@ -import {isErrorAborted, isFetchError} from "@lodestar/utils"; -import {IJson, RpcPayload} from "../../eth1/interface.js"; +import {bigIntToBytes, bytesToBigInt, fromHex, fromHexInto, isErrorAborted, isFetchError, toHex} from "@lodestar/utils"; +import {isQueueErrorAborted} from "../../util/queue/errors.js"; +import {ExecutionEngineState, ExecutionPayloadStatus} from "./interface.js"; import { ErrorJsonRpcResponse, HttpRpcError, IJsonRpcHttpClient, JsonRpcHttpClientEvent, JsonRpcHttpClientEventEmitter, -} from "../../eth1/provider/jsonRpcHttpClient.js"; -import {isQueueErrorAborted} from "../../util/queue/errors.js"; -import {ExecutionEngineState, ExecutionPayloadStatus} from "./interface.js"; +} from 
"./jsonRpcHttpClient.js"; + +/** QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API */ +export type QUANTITY = string; +/** DATA as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API */ +export type DATA = string; + +export const rootHexRegex = /^0x[a-fA-F0-9]{64}$/; + +export type IJson = string | number | boolean | undefined | IJson[] | {[key: string]: IJson}; + +export interface RpcPayload
<P = IJson[]>
{ + method: string; + params: P; +} + +/** + * QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API + * + * When encoding QUANTITIES (integers, numbers): encode as hex, prefix with “0x”, the most compact representation (slight exception: zero should be represented as “0x0”). Examples: + * - 0x41 (65 in decimal) + * - 0x400 (1024 in decimal) + * - WRONG: 0x (should always have at least one digit - zero is “0x0”) + * - WRONG: 0x0400 (no leading zeroes allowed) + * - WRONG: ff (must be prefixed 0x) + */ +export function numToQuantity(num: number | bigint): QUANTITY { + return "0x" + num.toString(16); +} + +/** + * QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API + */ +export function quantityToNum(hex: QUANTITY, id = ""): number { + const num = parseInt(hex, 16); + if (Number.isNaN(num) || num < 0) throw Error(`Invalid hex decimal ${id} '${hex}'`); + return num; +} + +/** + * QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API. + * Typesafe fn to convert hex string to bigint. The BigInt constructor param is any + */ +export function quantityToBigint(hex: QUANTITY, id = ""): bigint { + try { + return BigInt(hex); + } catch (e) { + throw Error(`Invalid hex bigint ${id} '${hex}': ${(e as Error).message}`); + } +} + +/** + * QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API. + */ +export function quantityToBytes(hex: QUANTITY): Uint8Array { + const bn = quantityToBigint(hex); + return bigIntToBytes(bn, 32, "le"); +} + +/** + * QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API. + * Compress a 32 ByteVector into a QUANTITY + */ +export function bytesToQuantity(bytes: Uint8Array): QUANTITY { + const bn = bytesToBigInt(bytes, "le"); + return numToQuantity(bn); +} + +/** + * DATA as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API + * + * When encoding UNFORMATTED DATA (byte arrays, account addresses, hashes, bytecode arrays): encode as hex, prefix with + * “0x”, two hex digits per byte. 
Examples: + * + * - 0x41 (size 1, “A”) + * - 0x004200 (size 3, “\0B\0”) + * - 0x (size 0, “”) + * - WRONG: 0xf0f0f (must be even number of digits) + * - WRONG: 004200 (must be prefixed 0x) + */ +export function bytesToData(bytes: Uint8Array): DATA { + return toHex(bytes); +} + +/** + * DATA as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API + */ +export function dataToBytes(hex: DATA, fixedLength: number | null): Uint8Array { + try { + const bytes = fromHex(hex); + if (fixedLength != null && bytes.length !== fixedLength) { + throw Error(`Wrong data length ${bytes.length} expected ${fixedLength}`); + } + return bytes; + } catch (e) { + (e as Error).message = `Invalid hex string: ${(e as Error).message}`; + throw e; + } +} + +/** + * Convert DATA into a preallocated buffer + * fromHexInto will throw if buffer's length is not the same as the decoded hex length + */ +export function dataIntoBytes(hex: DATA, buffer: Uint8Array): Uint8Array { + fromHexInto(hex, buffer); + return buffer; +} export type JsonRpcBackend = { // biome-ignore lint/suspicious/noExplicitAny: We need to use `any` type here diff --git a/packages/beacon-node/src/index.ts b/packages/beacon-node/src/index.ts index 0791328b82..d85fc6c201 100644 --- a/packages/beacon-node/src/index.ts +++ b/packages/beacon-node/src/index.ts @@ -2,11 +2,10 @@ export type {RestApiServerMetrics, RestApiServerModules, RestApiServerOpts} from "./api/rest/base.js"; export {RestApiServer} from "./api/rest/base.js"; -export {checkAndPersistAnchorState, initStateFromDb, initStateFromEth1} from "./chain/index.js"; +export {checkAndPersistAnchorState, initStateFromDb} from "./chain/index.js"; export {DbCPStateDatastore} from "./chain/stateCache/datastore/db.js"; export {FileCPStateDatastore} from "./chain/stateCache/datastore/file.js"; export {BeaconDb, type IBeaconDb} from "./db/index.js"; -export {Eth1Provider, type IEth1Provider} from "./eth1/index.js"; // Export metrics utilities to de-duplicate validator metrics export { type HttpMetricsServer, diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index 7e352e318e..dd3b96093a 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -1619,98 +1619,6 @@ export function createLodestarMetrics( }), }, - eth1: { - depositTrackerIsCaughtup: register.gauge({ - name: "lodestar_eth1_deposit_tracker_is_caughtup", - help: "Eth1 deposit is caught up 0=false 1=true", - }), - depositTrackerUpdateErrors: register.gauge({ - name: "lodestar_eth1_deposit_tracker_update_errors_total", - help: "Eth1 deposit update loop errors total", - }), - remoteHighestBlock: register.gauge({ - name: "lodestar_eth1_remote_highest_block", - help: "Eth1 current highest block number", - }), - depositEventsFetched: register.gauge({ - name: "lodestar_eth1_deposit_events_fetched_total", - help: "Eth1 deposit events fetched total", - }), - lastProcessedDepositBlockNumber: register.gauge({ - name: "lodestar_eth1_last_processed_deposit_block_number", - help: "Eth1 deposit tracker lastProcessedDepositBlockNumber", - }), - blocksFetched: register.gauge({ - name: "lodestar_eth1_blocks_fetched_total", - help: "Eth1 blocks fetched total", - }), - lastFetchedBlockBlockNumber: register.gauge({ - name: "lodestar_eth1_last_fetched_block_block_number", - help: "Eth1 deposit tracker last fetched block's block number", - }), - lastFetchedBlockTimestamp: register.gauge({ - name: 
"lodestar_eth1_last_fetched_block_timestamp", - help: "Eth1 deposit tracker last fetched block's timestamp", - }), - eth1FollowDistanceSecondsConfig: register.gauge({ - name: "lodestar_eth1_follow_distance_seconds_config", - help: "Constant with value = SECONDS_PER_ETH1_BLOCK * ETH1_FOLLOW_DISTANCE", - }), - eth1FollowDistanceDynamic: register.gauge({ - name: "lodestar_eth1_follow_distance_dynamic", - help: "Eth1 dynamic follow distance changed by the deposit tracker if blocks are slow", - }), - eth1GetBlocksBatchSizeDynamic: register.gauge({ - name: "lodestar_eth1_blocks_batch_size_dynamic", - help: "Dynamic batch size to fetch blocks", - }), - eth1GetLogsBatchSizeDynamic: register.gauge({ - name: "lodestar_eth1_logs_batch_size_dynamic", - help: "Dynamic batch size to fetch deposit logs", - }), - }, - - eth1HttpClient: { - requestTime: register.histogram<{routeId: string}>({ - name: "lodestar_eth1_http_client_request_time_seconds", - help: "eth1 JsonHttpClient - histogram or roundtrip request times", - labelNames: ["routeId"], - // Provide max resolution on problematic values around 1 second - buckets: [0.1, 0.5, 1, 2, 5, 15], - }), - streamTime: register.histogram<{routeId: string}>({ - name: "lodestar_eth1_http_client_stream_time_seconds", - help: "eth1 JsonHttpClient - streaming time by routeId", - labelNames: ["routeId"], - // Provide max resolution on problematic values around 1 second - buckets: [0.1, 0.5, 1, 2, 5, 15], - }), - requestErrors: register.gauge<{routeId: string}>({ - name: "lodestar_eth1_http_client_request_errors_total", - help: "eth1 JsonHttpClient - total count of request errors", - labelNames: ["routeId"], - }), - retryCount: register.gauge<{routeId: string}>({ - name: "lodestar_eth1_http_client_request_retries_total", - help: "eth1 JsonHttpClient - total count of request retries", - labelNames: ["routeId"], - }), - requestUsedFallbackUrl: register.gauge<{routeId: string}>({ - name: "lodestar_eth1_http_client_request_used_fallback_url_total", - help: "eth1 JsonHttpClient - total count of requests on fallback url(s)", - labelNames: ["routeId"], - }), - activeRequests: register.gauge<{routeId: string}>({ - name: "lodestar_eth1_http_client_active_requests", - help: "eth1 JsonHttpClient - current count of active requests", - labelNames: ["routeId"], - }), - configUrlsCount: register.gauge({ - name: "lodestar_eth1_http_client_config_urls_count", - help: "eth1 JsonHttpClient - static config urls count", - }), - }, - executionEnginerHttpClient: { requestTime: register.histogram<{routeId: string}>({ name: "lodestar_execution_engine_http_client_request_time_seconds", diff --git a/packages/beacon-node/src/node/nodejs.ts b/packages/beacon-node/src/node/nodejs.ts index 0619534e23..dff23b8743 100644 --- a/packages/beacon-node/src/node/nodejs.ts +++ b/packages/beacon-node/src/node/nodejs.ts @@ -13,7 +13,6 @@ import {BeaconRestApiServer, getApi} from "../api/index.js"; import {BeaconChain, IBeaconChain, initBeaconMetrics} from "../chain/index.js"; import {ValidatorMonitor, createValidatorMonitor} from "../chain/validatorMonitor.js"; import {IBeaconDb} from "../db/index.js"; -import {initializeEth1ForBlockProduction} from "../eth1/index.js"; import {initializeExecutionBuilder, initializeExecutionEngine} from "../execution/index.js"; import {HttpMetricsServer, Metrics, createMetrics, getHttpMetricsServer} from "../metrics/index.js"; import {MonitoringService} from "../monitoring/index.js"; @@ -68,7 +67,6 @@ enum LoggerModule { api = "api", backfill = "backfill", chain = "chain", - 
eth1 = "eth1", execution = "execution", metrics = "metrics", monitoring = "monitoring", @@ -220,13 +218,6 @@ export class BeaconNode { validatorMonitor, anchorState, isAnchorStateFinalized, - eth1: initializeEth1ForBlockProduction(opts.eth1, { - config, - db, - metrics, - logger: logger.child({module: LoggerModule.eth1}), - signal, - }), executionEngine: initializeExecutionEngine(opts.executionEngine, { metrics, signal, diff --git a/packages/beacon-node/src/node/options.ts b/packages/beacon-node/src/node/options.ts index 6195294427..185ee5edaf 100644 --- a/packages/beacon-node/src/node/options.ts +++ b/packages/beacon-node/src/node/options.ts @@ -2,7 +2,6 @@ import {ApiOptions, defaultApiOptions} from "../api/options.js"; import {ArchiveMode, DEFAULT_ARCHIVE_MODE, IChainOptions, defaultChainOptions} from "../chain/options.js"; import {ValidatorMonitorOpts, defaultValidatorMonitorOpts} from "../chain/validatorMonitor.js"; import {DatabaseOptions, defaultDbOptions} from "../db/options.js"; -import {Eth1Options, defaultEth1Options} from "../eth1/options.js"; import { ExecutionBuilderOpts, ExecutionEngineOpts, @@ -26,7 +25,6 @@ export interface IBeaconNodeOptions { api: ApiOptions; chain: IChainOptions; db: DatabaseOptions; - eth1: Eth1Options; executionEngine: ExecutionEngineOpts; executionBuilder: ExecutionBuilderOpts; metrics: MetricsOptions; @@ -40,7 +38,6 @@ export const defaultOptions: IBeaconNodeOptions = { api: defaultApiOptions, chain: defaultChainOptions, db: defaultDbOptions, - eth1: defaultEth1Options, executionEngine: defaultExecutionEngineOpts, executionBuilder: defaultExecutionBuilderOpts, metrics: defaultMetricsOptions, diff --git a/packages/beacon-node/src/node/utils/interop/deposits.ts b/packages/beacon-node/src/node/utils/interop/deposits.ts index 21f78a6ec2..01989310f1 100644 --- a/packages/beacon-node/src/node/utils/interop/deposits.ts +++ b/packages/beacon-node/src/node/utils/interop/deposits.ts @@ -1,5 +1,6 @@ import {digest} from "@chainsafe/as-sha256"; import {Tree, toGindex} from "@chainsafe/persistent-merkle-tree"; +import {ByteVectorType, CompositeViewDU, ListCompositeType} from "@chainsafe/ssz"; import {ChainConfig} from "@lodestar/config"; import { BLS_WITHDRAWAL_PREFIX, @@ -9,7 +10,8 @@ import { } from "@lodestar/params"; import {ZERO_HASH, computeDomain, computeSigningRoot, interopSecretKeys} from "@lodestar/state-transition"; import {phase0, ssz} from "@lodestar/types"; -import {DepositTree} from "../../../db/repositories/depositDataRoot.js"; + +export type DepositTree = CompositeViewDU>; /** * Compute and return deposit data from other validators. 
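The QUANTITY/DATA helpers relocated into execution/engine/utils.ts above follow the execution-layer JSON-RPC encoding rules spelled out in their doc comments: QUANTITY is the most compact 0x-prefixed hex (zero is "0x0"), DATA is 0x-prefixed with two hex digits per byte. A minimal usage sketch of the expected round trips, assuming the ./utils.js module introduced by this patch and Node's built-in assert (illustrative only, not part of the patch):

import assert from "node:assert/strict";
import {bytesToData, dataToBytes, numToQuantity, quantityToNum} from "./utils.js";

// QUANTITY: minimal hex, "0x"-prefixed; zero encodes as "0x0"
assert.equal(numToQuantity(65), "0x41");
assert.equal(numToQuantity(0), "0x0");
assert.equal(quantityToNum("0x400"), 1024);

// DATA: two hex digits per byte; dataToBytes can enforce a fixed byte length
assert.equal(bytesToData(dataToBytes("0x004200", 3)), "0x004200");
assert.throws(() => dataToBytes("0x004200", 32)); // 3 bytes where a 32-byte field is expected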
diff --git a/packages/beacon-node/src/node/utils/interop/state.ts b/packages/beacon-node/src/node/utils/interop/state.ts index 64a44d09d8..2e159b9947 100644 --- a/packages/beacon-node/src/node/utils/interop/state.ts +++ b/packages/beacon-node/src/node/utils/interop/state.ts @@ -6,7 +6,7 @@ import { initializeBeaconStateFromEth1, } from "@lodestar/state-transition"; import {Bytes32, TimeSeconds, phase0, ssz, sszTypesFor} from "@lodestar/types"; -import {DepositTree} from "../../../db/repositories/depositDataRoot.js"; +import {DepositTree} from "./deposits.js"; export const INTEROP_BLOCK_HASH = Buffer.alloc(32, "B"); export const INTEROP_TIMESTAMP = Math.pow(2, 40); diff --git a/packages/beacon-node/src/node/utils/state.ts b/packages/beacon-node/src/node/utils/state.ts index ac0ff8101f..6f54fdaf33 100644 --- a/packages/beacon-node/src/node/utils/state.ts +++ b/packages/beacon-node/src/node/utils/state.ts @@ -1,7 +1,6 @@ import {ChainForkConfig} from "@lodestar/config"; import {BeaconStateAllForks} from "@lodestar/state-transition"; -import {phase0, ssz} from "@lodestar/types"; -import {IBeaconDb} from "../../db/index.js"; +import {ssz} from "@lodestar/types"; import {interopDeposits} from "./interop/deposits.js"; import {InteropStateOpts, getInteropState} from "./interop/state.js"; @@ -12,26 +11,12 @@ export function initDevState( config: ChainForkConfig, validatorCount: number, interopStateOpts: InteropStateOpts -): {deposits: phase0.Deposit[]; state: BeaconStateAllForks} { +): BeaconStateAllForks { const deposits = interopDeposits( config, ssz.phase0.DepositDataRootList.defaultViewDU(), validatorCount, interopStateOpts ); - const state = getInteropState(config, interopStateOpts, deposits); - return {deposits, state}; -} - -export async function writeDeposits(db: IBeaconDb, deposits: phase0.Deposit[]): Promise { - for (let i = 0; i < deposits.length; i++) { - await Promise.all([ - db.depositEvent.put(i, { - blockNumber: i, - index: i, - depositData: deposits[i].data, - }), - db.depositDataRoot.put(i, ssz.phase0.DepositData.hashTreeRoot(deposits[i].data)), - ]); - } + return getInteropState(config, interopStateOpts, deposits); } diff --git a/packages/beacon-node/test/e2e/eth1/eth1ForBlockProduction.test.ts b/packages/beacon-node/test/e2e/eth1/eth1ForBlockProduction.test.ts deleted file mode 100644 index 13f90699b5..0000000000 --- a/packages/beacon-node/test/e2e/eth1/eth1ForBlockProduction.test.ts +++ /dev/null @@ -1,118 +0,0 @@ -import {afterAll, beforeAll, describe, expect, it} from "vitest"; -import {fromHexString, toHexString} from "@chainsafe/ssz"; -import {KeyValue} from "@lodestar/db"; -import {LevelDbController} from "@lodestar/db/controller/level"; -import {phase0, ssz} from "@lodestar/types"; -import {sleep} from "@lodestar/utils"; -import {BeaconDb} from "../../../src/db/index.js"; -import {Eth1ForBlockProduction} from "../../../src/eth1/index.js"; -import {Eth1Options} from "../../../src/eth1/options.js"; -import {Eth1Provider} from "../../../src/eth1/provider/eth1Provider.js"; -import {getGoerliRpcUrl} from "../../testParams.js"; -import {createCachedBeaconStateTest} from "../../utils/cachedBeaconState.js"; -import {testLogger} from "../../utils/logger.js"; -import {generateState} from "../../utils/state.js"; -import {getTestnetConfig, medallaTestnetConfig} from "../../utils/testnet.js"; - -const dbLocation = "./.__testdb"; - -// First Pyrmont deposits deposit_data_root field -const pyrmontDepositsDataRoot = [ - // 
https://goerli.etherscan.io/tx/0x342d3551439a13555c62f95d27b2fbabc816e4c23a6e58c28e69af6fae6d0159 - "0x8976a7deec59f3ebcdcbd67f512fdd07a9a7cab72b63e85bc7a22bb689c2a40c", - // https://goerli.etherscan.io/tx/0x6bab2263e1801ae3ffd14a31c08602c17f0e105e8ab849855adbd661d8b87bfd - "0x61cef7d8a3f7c590a2dc066ae1c95def5ce769b3e9471fdb34f36f7a7246965e", -]; - -// https://github.com/ChainSafe/lodestar/issues/5967 -describe.skip("eth1 / Eth1Provider", () => { - const controller = new AbortController(); - - const config = getTestnetConfig(); - const logger = testLogger(); - - let db: BeaconDb; - - beforeAll(async () => { - // Nuke DB to make sure it's empty - await LevelDbController.destroy(dbLocation); - - db = new BeaconDb(config, await LevelDbController.create({name: dbLocation}, {logger})); - }); - - afterAll(async () => { - controller.abort(); - await db.close(); - await LevelDbController.destroy(dbLocation); - }); - - it("Should fetch real Pyrmont eth1 data for block proposing", async () => { - const eth1Options: Eth1Options = { - enabled: true, - providerUrls: [getGoerliRpcUrl()], - depositContractDeployBlock: medallaTestnetConfig.depositBlock, - unsafeAllowDepositDataOverwrite: false, - }; - const eth1Provider = new Eth1Provider(config, eth1Options, controller.signal); - - const eth1ForBlockProduction = new Eth1ForBlockProduction(eth1Options, { - config, - db, - metrics: null, - logger, - signal: controller.signal, - eth1Provider, - }); - - // Resolves when Eth1ForBlockProduction has fetched both blocks and deposits - const {eth1Datas, deposits} = await (async function resolveWithEth1DataAndDeposits() { - while (true) { - const eth1Datas = await db.eth1Data.entries(); - const deposits = await db.depositEvent.values(); - if (eth1Datas.length > 0 && deposits.length > 0) { - return {eth1Datas, deposits}; - } - await sleep(1000, controller.signal); - } - })(); - - // Generate mock state to query eth1 data for block proposing - if (eth1Datas.length === 0) throw Error("No eth1Datas"); - const {key: maxTimestamp, value: latestEth1Data} = eth1Datas.at(-1) as KeyValue; - - const {SECONDS_PER_ETH1_BLOCK, ETH1_FOLLOW_DISTANCE} = config; - // block.timestamp + SECONDS_PER_ETH1_BLOCK * ETH1_FOLLOW_DISTANCE <= period_start && ... 
- const periodStart = maxTimestamp + SECONDS_PER_ETH1_BLOCK * ETH1_FOLLOW_DISTANCE; - - // Compute correct deposit root tree - const depositRootTree = ssz.phase0.DepositDataRootList.toViewDU( - pyrmontDepositsDataRoot.map((root) => fromHexString(root)) - ); - - const tbState = generateState( - { - // Set genesis time and slot so latestEth1Data is considered - slot: 0, - genesisTime: periodStart, - // No deposits processed yet - // eth1_deposit_index represents the next deposit index to be added - eth1DepositIndex: 0, - // Set eth1Data with deposit length to return them - eth1Data: { - depositCount: deposits.length, - depositRoot: depositRootTree.hashTreeRoot(), - blockHash: Buffer.alloc(32), - }, - }, - config - ); - - const state = createCachedBeaconStateTest(tbState, config); - - const result = await eth1ForBlockProduction.getEth1DataAndDeposits(state); - expect(result.eth1Data).toEqual(latestEth1Data); - expect(result.deposits.map((deposit) => toHexString(ssz.phase0.DepositData.hashTreeRoot(deposit.data)))).toEqual( - pyrmontDepositsDataRoot - ); - }); -}); diff --git a/packages/beacon-node/test/e2e/eth1/eth1Provider.test.ts b/packages/beacon-node/test/e2e/eth1/eth1Provider.test.ts deleted file mode 100644 index 3fc876e1ca..0000000000 --- a/packages/beacon-node/test/e2e/eth1/eth1Provider.test.ts +++ /dev/null @@ -1,99 +0,0 @@ -import {afterEach, beforeEach, describe, expect, it} from "vitest"; -import {fromHexString} from "@chainsafe/ssz"; -import {Eth1Block} from "../../../src/eth1/interface.js"; -import {Eth1Options} from "../../../src/eth1/options.js"; -import {Eth1Provider, parseEth1Block} from "../../../src/eth1/provider/eth1Provider.js"; -import {getGoerliRpcUrl} from "../../testParams.js"; -import {getTestnetConfig, goerliTestnetDepositEvents} from "../../utils/testnet.js"; - -// https://github.com/ChainSafe/lodestar/issues/5967 -describe.skip("eth1 / Eth1Provider", () => { - let controller: AbortController; - beforeEach(() => { - controller = new AbortController(); - }); - afterEach(() => controller.abort()); - - const config = getTestnetConfig(); - - // Compute lazily since getGoerliRpcUrl() throws if GOERLI_RPC_URL is not set - function getEth1Provider(): Eth1Provider { - const eth1Options: Eth1Options = { - enabled: true, - providerUrls: [getGoerliRpcUrl()], - depositContractDeployBlock: 0, - unsafeAllowDepositDataOverwrite: false, - }; - return new Eth1Provider(config, eth1Options, controller.signal); - } - - it("Should validate contract", async () => { - await getEth1Provider().validateContract(); - }); - - it("Should get latest block number", async () => { - const blockNumber = await getEth1Provider().getBlockNumber(); - expect(blockNumber).toBeGreaterThan(0); - }); - - it("Should get a specific block by number", async () => { - const goerliGenesisBlock: Eth1Block = { - blockHash: fromHexString("0xbf7e331f7f7c1dd2e05159666b3bf8bc7a8a3a9eb1d518969eab529dd9b88c1a"), - blockNumber: 0, - timestamp: 1548854791, - }; - const block = await getEth1Provider().getBlockByNumber(goerliGenesisBlock.blockNumber); - expect(block && parseEth1Block(block)).toEqual(goerliGenesisBlock); - }); - - it("Should get deposits events for a block range", async () => { - const blockNumbers = goerliTestnetDepositEvents.map((log) => log.blockNumber); - const fromBlock = Math.min(...blockNumbers); - const toBlock = Math.min(...blockNumbers); - const depositEvents = await getEth1Provider().getDepositEvents(fromBlock, toBlock); - expect(depositEvents).toEqual(goerliTestnetDepositEvents); - }); - - // - - 
const firstGoerliBlocks: Eth1Block[] = [ - [0, 1548854791, "0xbf7e331f7f7c1dd2e05159666b3bf8bc7a8a3a9eb1d518969eab529dd9b88c1a"], - [1, 1548947453, "0x8f5bab218b6bb34476f51ca588e9f4553a3a7ce5e13a66c660a5283e97e9a85a"], - [2, 1548947468, "0xe675f1362d82cdd1ec260b16fb046c17f61d8a84808150f5d715ccce775f575e"], - [3, 1548947483, "0xd5daa825732729bb0d2fd187a1b888e6bfc890f1fc5333984740d9052afb2920"], - [4, 1548947498, "0xfe43c87178f0f87c2be161389aa2d35f3065d330bb596a6d9e01529706bf040d"], - ].map(([number, timestamp, hash]) => ({ - blockHash: fromHexString(hash as string), - blockNumber: number as number, - timestamp: timestamp as number, - })); - - const goerliSampleContract = { - address: "0x07b39F4fDE4A38bACe212b546dAc87C58DfE3fDC", - code: "0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a", - }; - - it("getBlocksByNumber: Should fetch a block range", async () => { - const fromBlock = firstGoerliBlocks[0].blockNumber; - const toBlock = firstGoerliBlocks.at(-1)?.blockNumber as number; - const blocks = await getEth1Provider().getBlocksByNumber(fromBlock, toBlock); - expect(blocks.map(parseEth1Block)).toEqual(firstGoerliBlocks); - }); - - it("getBlockByNumber: Should fetch a single block", async () => { - const firstGoerliBlock = firstGoerliBlocks[0]; - const block = await getEth1Provider().getBlockByNumber(firstGoerliBlock.blockNumber); - expect(block && parseEth1Block(block)).toEqual(firstGoerliBlock); - }); - - it("getBlockNumber: Should fetch latest block number", async () => { - const blockNumber = await getEth1Provider().getBlockNumber(); - expect(blockNumber).toBeInstanceOf(Number); - expect(blockNumber).toBeGreaterThan(0); - }); - - it("getCode: Should fetch code for a contract", async () => { - const code = await getEth1Provider().getCode(goerliSampleContract.address); - expect(code).toEqual(expect.arrayContaining([goerliSampleContract.code])); - }); -}); diff --git a/packages/beacon-node/test/e2e/eth1/stream.test.ts b/packages/beacon-node/test/e2e/eth1/stream.test.ts deleted file mode 100644 index 1ec20608cf..0000000000 --- a/packages/beacon-node/test/e2e/eth1/stream.test.ts +++ /dev/null @@ -1,70 +0,0 @@ -import {afterEach, beforeEach, describe, expect, it} from "vitest"; -import {Eth1Options} from "../../../src/eth1/options.js"; -import {Eth1Provider} from "../../../src/eth1/provider/eth1Provider.js"; -import {getDepositsAndBlockStreamForGenesis, getDepositsStream} from "../../../src/eth1/stream.js"; -import {getGoerliRpcUrl} from "../../testParams.js"; -import {getTestnetConfig, medallaTestnetConfig} from "../../utils/testnet.js"; - -// https://github.com/ChainSafe/lodestar/issues/5967 -describe.skip("Eth1 streams", () => { - let controller: AbortController; - beforeEach(() => { - controller = new AbortController(); - }); - afterEach(() => controller.abort()); - - const config = getTestnetConfig(); - - // Compute lazily since getGoerliRpcUrl() throws if GOERLI_RPC_URL is not set - function getEth1Provider(): Eth1Provider { - const eth1Options: Eth1Options = { - enabled: true, - providerUrls: [getGoerliRpcUrl()], - depositContractDeployBlock: 0, - unsafeAllowDepositDataOverwrite: false, - }; - return new Eth1Provider(config, eth1Options, controller.signal); - } - - const maxBlocksPerPoll = 1000; - const depositsToFetch = 1000; - const eth1Params = {...config, maxBlocksPerPoll}; - - it(`Should fetch ${depositsToFetch} deposits with getDepositsStream`, async () => { - const depositsStream = getDepositsStream( - 
medallaTestnetConfig.blockWithDepositActivity, - getEth1Provider(), - eth1Params, - controller.signal - ); - - let depositCount = 0; - for await (const {depositEvents} of depositsStream) { - depositCount += depositEvents.length; - if (depositCount > depositsToFetch) { - break; - } - } - - expect(depositCount).toBeGreaterThan(depositsToFetch); - }); - - it(`Should fetch ${depositsToFetch} deposits with getDepositsAndBlockStreamForGenesis`, async () => { - const stream = getDepositsAndBlockStreamForGenesis( - medallaTestnetConfig.blockWithDepositActivity, - getEth1Provider(), - eth1Params, - controller.signal - ); - - let depositCount = 0; - for await (const [deposit] of stream) { - depositCount += deposit.length; - if (depositCount > depositsToFetch) { - break; - } - } - - expect(depositCount).toBeGreaterThan(depositsToFetch); - }); -}); diff --git a/packages/beacon-node/test/e2e/eth1/jsonRpcHttpClient.test.ts b/packages/beacon-node/test/e2e/execution/engine/jsonRpcHttpClient.test.ts similarity index 85% rename from packages/beacon-node/test/e2e/eth1/jsonRpcHttpClient.test.ts rename to packages/beacon-node/test/e2e/execution/engine/jsonRpcHttpClient.test.ts index b088db62f4..95180f18cb 100644 --- a/packages/beacon-node/test/e2e/eth1/jsonRpcHttpClient.test.ts +++ b/packages/beacon-node/test/e2e/execution/engine/jsonRpcHttpClient.test.ts @@ -2,11 +2,11 @@ import crypto from "node:crypto"; import http from "node:http"; import {afterEach, describe, expect, it, vi} from "vitest"; import {FetchError, sleep} from "@lodestar/utils"; -import {RpcPayload} from "../../../src/eth1/interface.js"; -import {JsonRpcHttpClient} from "../../../src/eth1/provider/jsonRpcHttpClient.js"; -import {getGoerliRpcUrl} from "../../testParams.js"; +import {JsonRpcHttpClient} from "../../../../src/execution/engine/jsonRpcHttpClient.js"; +import {RpcPayload} from "../../../../src/execution/engine/utils.js"; +import {getGoerliRpcUrl} from "../../../testParams.js"; -describe("eth1 / jsonRpcHttpClient", () => { +describe("execution / engine / jsonRpcHttpClient", () => { vi.setConfig({testTimeout: 10_000}); const port = 36421; @@ -145,10 +145,10 @@ describe("eth1 / jsonRpcHttpClient", () => { const controller = new AbortController(); if (abort) setTimeout(() => controller.abort(), 50); - const eth1JsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); + const jsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); try { - await eth1JsonRpcClient.fetch(payload, {timeout}); + await jsonRpcClient.fetch(payload, {timeout}); } catch (error) { if (testCase.errorCode) { expect((error as FetchError).code).toBe(testCase.errorCode); @@ -161,7 +161,7 @@ describe("eth1 / jsonRpcHttpClient", () => { } }); -describe("eth1 / jsonRpcHttpClient - with retries", () => { +describe("execution / engine / jsonRpcHttpClient - with retries", () => { vi.setConfig({testTimeout: 10_000}); const port = 36421; @@ -186,9 +186,9 @@ describe("eth1 / jsonRpcHttpClient - with retries", () => { const retries = 2; const controller = new AbortController(); - const eth1JsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); + const jsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); await expect( - eth1JsonRpcClient.fetchWithRetries(payload, { + jsonRpcClient.fetchWithRetries(payload, { retries, shouldRetry: () => { // using the shouldRetry function to keep tab of the retried requests @@ -208,9 +208,9 @@ describe("eth1 / jsonRpcHttpClient - with retries", () => { const retries = 2; 
const controller = new AbortController(); - const eth1JsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); + const jsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); await expect( - eth1JsonRpcClient.fetchWithRetries(payload, { + jsonRpcClient.fetchWithRetries(payload, { retries, shouldRetry: () => { // using the shouldRetry function to keep tab of the retried requests @@ -247,8 +247,8 @@ describe("eth1 / jsonRpcHttpClient - with retries", () => { const retries = 2; const controller = new AbortController(); - const eth1JsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); - await expect(eth1JsonRpcClient.fetchWithRetries(payload, {retries})).rejects.toThrow("Not Found"); + const jsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); + await expect(jsonRpcClient.fetchWithRetries(payload, {retries})).rejects.toThrow("Not Found"); expect(requestCount).toBeWithMessage(retries + 1, "404 responses should be retried before failing"); }); @@ -278,8 +278,8 @@ describe("eth1 / jsonRpcHttpClient - with retries", () => { const timeout = 200; const controller = new AbortController(); - const eth1JsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); - await expect(eth1JsonRpcClient.fetchWithRetries(payload, {retries, timeout})).rejects.toThrow("Timeout request"); + const jsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); + await expect(jsonRpcClient.fetchWithRetries(payload, {retries, timeout})).rejects.toThrow("Timeout request"); expect(requestCount).toBeWithMessage(retries + 1, "Timeout request should be retried before failing"); }); @@ -308,8 +308,8 @@ describe("eth1 / jsonRpcHttpClient - with retries", () => { const controller = new AbortController(); setTimeout(() => controller.abort(), 50); - const eth1JsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); - await expect(eth1JsonRpcClient.fetchWithRetries(payload, {retries, timeout})).rejects.toThrow("Aborted"); + const jsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); + await expect(jsonRpcClient.fetchWithRetries(payload, {retries, timeout})).rejects.toThrow("Aborted"); expect(requestCount).toBeWithMessage(1, "Aborted request should not be retried"); }); @@ -338,8 +338,8 @@ describe("eth1 / jsonRpcHttpClient - with retries", () => { const retries = 2; const controller = new AbortController(); - const eth1JsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); - await expect(eth1JsonRpcClient.fetchWithRetries(payload, {retries})).rejects.toThrow("Method not found"); + const jsonRpcClient = new JsonRpcHttpClient([url], {signal: controller.signal}); + await expect(jsonRpcClient.fetchWithRetries(payload, {retries})).rejects.toThrow("Method not found"); expect(requestCount).toBeWithMessage(1, "Payload error (non-network error) should not be retried"); }); }); diff --git a/packages/beacon-node/test/e2e/interop/genesisState.test.ts b/packages/beacon-node/test/e2e/interop/genesisState.test.ts index 19c3a87fd9..b43a49f724 100644 --- a/packages/beacon-node/test/e2e/interop/genesisState.test.ts +++ b/packages/beacon-node/test/e2e/interop/genesisState.test.ts @@ -59,7 +59,7 @@ describe("interop / initDevState", () => { it("Create correct genesisState", () => { const validatorCount = 8; - const {state} = initDevState(config, validatorCount, { + const state = initDevState(config, validatorCount, { genesisTime: 1644000000, eth1BlockHash: Buffer.alloc(32, 0xaa), 
eth1Timestamp: 1644000000, diff --git a/packages/beacon-node/test/mocks/mockedBeaconChain.ts b/packages/beacon-node/test/mocks/mockedBeaconChain.ts index 5cc257feb5..2aadb8d27f 100644 --- a/packages/beacon-node/test/mocks/mockedBeaconChain.ts +++ b/packages/beacon-node/test/mocks/mockedBeaconChain.ts @@ -12,7 +12,6 @@ import {AggregatedAttestationPool, OpPool, SyncContributionAndProofPool} from ". import {QueuedStateRegenerator} from "../../src/chain/regen/index.js"; import {SeenBlockInput} from "../../src/chain/seenCache/seenGossipBlockInput.js"; import {ShufflingCache} from "../../src/chain/shufflingCache.js"; -import {Eth1ForBlockProduction} from "../../src/eth1/index.js"; import {ExecutionBuilderHttp} from "../../src/execution/builder/http.js"; import {ExecutionEngineHttp} from "../../src/execution/engine/index.js"; import {Clock} from "../../src/util/clock.js"; @@ -24,7 +23,6 @@ export type MockedBeaconChain = Mocked & { forkChoice: MockedForkChoice; executionEngine: Mocked; executionBuilder: Mocked; - eth1: Mocked; opPool: Mocked; aggregatedAttestationPool: Mocked; syncContributionAndProofPool: Mocked; @@ -73,7 +71,6 @@ vi.mock("@lodestar/fork-choice", async (importActual) => { }); vi.mock("../../src/chain/regen/index.js"); -vi.mock("../../src/eth1/index.js"); vi.mock("../../src/chain/beaconProposerCache.js"); vi.mock("../../src/chain/seenCache/seenGossipBlockInput.js"); vi.mock("../../src/chain/shufflingCache.js"); @@ -136,8 +133,6 @@ vi.mock("../../src/chain/chain.js", async (importActual) => { getClientVersion: vi.fn(), }, executionBuilder: {}, - // @ts-expect-error - eth1: new Eth1ForBlockProduction(), opPool: new OpPool(), aggregatedAttestationPool: new AggregatedAttestationPool(config), syncContributionAndProofPool: new SyncContributionAndProofPool(config, clock), diff --git a/packages/beacon-node/test/mocks/mockedBeaconDb.ts b/packages/beacon-node/test/mocks/mockedBeaconDb.ts index 72768d01d3..32c453da18 100644 --- a/packages/beacon-node/test/mocks/mockedBeaconDb.ts +++ b/packages/beacon-node/test/mocks/mockedBeaconDb.ts @@ -10,9 +10,6 @@ import { BlockRepository, DataColumnSidecarArchiveRepository, DataColumnSidecarRepository, - DepositDataRootRepository, - DepositEventRepository, - Eth1DataRepository, ProposerSlashingRepository, StateArchiveRepository, VoluntaryExitRepository, @@ -34,10 +31,6 @@ export type MockedBeaconDb = Mocked & { blsToExecutionChange: Mocked; proposerSlashing: Mocked; attesterSlashing: Mocked; - depositEvent: Mocked; - - depositDataRoot: Mocked; - eth1Data: Mocked; }; vi.mock("../../src/db/repositories/index.js"); @@ -55,10 +48,6 @@ vi.mock("../../src/db/index.js", async (importActual) => { blsToExecutionChange: vi.mocked(new BLSToExecutionChangeRepository({} as any, {} as any)), proposerSlashing: vi.mocked(new ProposerSlashingRepository({} as any, {} as any)), attesterSlashing: vi.mocked(new AttesterSlashingRepository({} as any, {} as any)), - depositEvent: vi.mocked(new DepositEventRepository({} as any, {} as any)), - - depositDataRoot: vi.mocked(new DepositDataRootRepository({} as any, {} as any)), - eth1Data: vi.mocked(new Eth1DataRepository({} as any, {} as any)), blobSidecars: vi.mocked(new BlobSidecarsRepository({} as any, {} as any)), blobSidecarsArchive: vi.mocked(new BlobSidecarsArchiveRepository({} as any, {} as any)), diff --git a/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts b/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts index 4d52b4a64d..26c685480d 100644 --- 
a/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts +++ b/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts @@ -8,7 +8,6 @@ import {defaultOptions as defaultValidatorOptions} from "@lodestar/validator"; import {generatePerfTestCachedStateAltair} from "../../../../../state-transition/test/perf/util.js"; import {BeaconChain} from "../../../../src/chain/index.js"; import {BlockType, produceBlockBody} from "../../../../src/chain/produceBlock/produceBlockBody.js"; -import {Eth1ForBlockProductionDisabled} from "../../../../src/eth1/index.js"; import {ExecutionEngineDisabled} from "../../../../src/execution/engine/index.js"; import {ArchiveMode, BeaconDb} from "../../../../src/index.js"; import {testLogger} from "../../../utils/logger.js"; @@ -49,7 +48,6 @@ describe("produceBlockBody", () => { validatorMonitor: null, anchorState: state, isAnchorStateFinalized: true, - eth1: new Eth1ForBlockProductionDisabled(), executionEngine: new ExecutionEngineDisabled(), } ); diff --git a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts index e7cd6a1b2c..793b24564d 100644 --- a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts +++ b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts @@ -12,7 +12,6 @@ import {BlockInputPreData} from "../../../src/chain/blocks/blockInput/blockInput import {BlockInputSource} from "../../../src/chain/blocks/blockInput/types.js"; import {AttestationImportOpt} from "../../../src/chain/blocks/types.js"; import {BeaconChain} from "../../../src/chain/index.js"; -import {Eth1ForBlockProductionDisabled} from "../../../src/eth1/index.js"; import {ExecutionEngineDisabled} from "../../../src/execution/engine/index.js"; import {ArchiveMode, BeaconDb} from "../../../src/index.js"; import {linspace} from "../../../src/util/numpy.js"; @@ -101,7 +100,6 @@ describe.skip("verify+import blocks - range sync perf test", () => { validatorMonitor: null, anchorState: state, isAnchorStateFinalized: true, - eth1: new Eth1ForBlockProductionDisabled(), executionEngine: new ExecutionEngineDisabled(), } ); diff --git a/packages/beacon-node/test/perf/eth1/pickEth1Vote.test.ts b/packages/beacon-node/test/perf/eth1/pickEth1Vote.test.ts deleted file mode 100644 index ca092a60e0..0000000000 --- a/packages/beacon-node/test/perf/eth1/pickEth1Vote.test.ts +++ /dev/null @@ -1,98 +0,0 @@ -import {bench, describe, setBenchOpts} from "@chainsafe/benchmark"; -import {ContainerType, ListCompositeType} from "@chainsafe/ssz"; -import {BeaconStateAllForks, newFilledArray} from "@lodestar/state-transition"; -import {phase0, ssz} from "@lodestar/types"; -import {fastSerializeEth1Data, pickEth1Vote} from "../../../src/eth1/utils/eth1Vote.js"; - -describe("eth1 / pickEth1Vote", () => { - const ETH1_FOLLOW_DISTANCE_MAINNET = 2048; - const EPOCHS_PER_ETH1_VOTING_PERIOD_MAINNET = 64; - const SLOTS_PER_EPOCH_MAINNET = 32; - const eth1DataVotesLimit = EPOCHS_PER_ETH1_VOTING_PERIOD_MAINNET * SLOTS_PER_EPOCH_MAINNET; - - const stateMainnetType = new ContainerType({ - eth1DataVotes: new ListCompositeType(ssz.phase0.Eth1Data, eth1DataVotesLimit), - }); - - const stateNoVotes = stateMainnetType.defaultViewDU(); - const stateMaxVotes = stateMainnetType.defaultViewDU(); - - // Must convert all instances to create a cache - stateMaxVotes.eth1DataVotes = ssz.phase0.Eth1DataVotes.toViewDU( - newFilledArray(eth1DataVotesLimit, { - depositRoot: Buffer.alloc(32, 0xdd), - // All votes are the 
same - depositCount: 1e6, - blockHash: Buffer.alloc(32, 0xdd), - }) - ); - stateMaxVotes.commit(); - - // votesToConsider range: - // lte: periodStart - SECONDS_PER_ETH1_BLOCK * ETH1_FOLLOW_DISTANCE, - // gte: periodStart - SECONDS_PER_ETH1_BLOCK * ETH1_FOLLOW_DISTANCE * 2, - const votesToConsider = Array.from({length: ETH1_FOLLOW_DISTANCE_MAINNET}, (_, i) => ({ - depositRoot: Buffer.alloc(32, 0xdd), - // Each eth1Data is different - depositCount: 1e6 + i, - blockHash: Buffer.alloc(32, 0xdd), - })); - - bench("pickEth1Vote - no votes", () => { - pickEth1Vote(stateNoVotes as unknown as BeaconStateAllForks, votesToConsider); - }); - - bench("pickEth1Vote - max votes", () => { - pickEth1Vote(stateMaxVotes as unknown as BeaconStateAllForks, votesToConsider); - }); -}); - -// Results in Linux Feb 2022 -// -// eth1 / pickEth1Vote serializers -// ✓ pickEth1Vote - Eth1Data hashTreeRoot value x2048 58.45559 ops/s 17.10700 ms/op - 45 runs 1.27 s -// ✓ pickEth1Vote - Eth1Data hashTreeRoot tree x2048 122.1150 ops/s 8.189003 ms/op - 65 runs 1.75 s -// ✓ pickEth1Vote - Eth1Data fastSerialize value x2048 533.9807 ops/s 1.872727 ms/op - 272 runs 1.01 s -// ✓ pickEth1Vote - Eth1Data fastSerialize tree x2048 59.49406 ops/s 16.80840 ms/op - 60 runs 1.51 s - -describe("eth1 / pickEth1Vote serializers", () => { - setBenchOpts({noThreshold: true}); - - const ETH1_FOLLOW_DISTANCE_MAINNET = 2048; - const eth1DataValue: phase0.Eth1Data = { - depositRoot: Buffer.alloc(32, 0xdd), - depositCount: 1e6, - blockHash: Buffer.alloc(32, 0xdd), - }; - const eth1DataTree = ssz.phase0.Eth1Data.toViewDU(eth1DataValue); - - bench(`pickEth1Vote - Eth1Data hashTreeRoot value x${ETH1_FOLLOW_DISTANCE_MAINNET}`, () => { - for (let i = 0; i < ETH1_FOLLOW_DISTANCE_MAINNET; i++) { - ssz.phase0.Eth1Data.hashTreeRoot(eth1DataValue); - } - }); - - // Create new copies of eth1DataTree to drop the hashing cache - bench({ - id: `pickEth1Vote - Eth1Data hashTreeRoot tree x${ETH1_FOLLOW_DISTANCE_MAINNET}`, - beforeEach: () => - Array.from({length: ETH1_FOLLOW_DISTANCE_MAINNET}, () => ssz.phase0.Eth1Data.toViewDU(eth1DataValue)), - fn: (eth1DataTrees) => { - for (let i = 0; i < eth1DataTrees.length; i++) { - ssz.phase0.Eth1Data.hashTreeRoot(eth1DataTrees[i]); - } - }, - }); - - bench(`pickEth1Vote - Eth1Data fastSerialize value x${ETH1_FOLLOW_DISTANCE_MAINNET}`, () => { - for (let i = 0; i < ETH1_FOLLOW_DISTANCE_MAINNET; i++) { - fastSerializeEth1Data(eth1DataValue); - } - }); - - bench(`pickEth1Vote - Eth1Data fastSerialize tree x${ETH1_FOLLOW_DISTANCE_MAINNET}`, () => { - for (let i = 0; i < ETH1_FOLLOW_DISTANCE_MAINNET; i++) { - fastSerializeEth1Data(eth1DataTree); - } - }); -}); diff --git a/packages/beacon-node/test/sim/electra-interop.test.ts b/packages/beacon-node/test/sim/electra-interop.test.ts index 6166df8cd2..eec3fd1e35 100644 --- a/packages/beacon-node/test/sim/electra-interop.test.ts +++ b/packages/beacon-node/test/sim/electra-interop.test.ts @@ -8,11 +8,10 @@ import {CachedBeaconStateElectra} from "@lodestar/state-transition"; import {Epoch, Slot, electra} from "@lodestar/types"; import {LogLevel, sleep} from "@lodestar/utils"; import {ValidatorProposerConfig} from "@lodestar/validator"; -import {bytesToData} from "../../lib/eth1/provider/utils.js"; import {BeaconRestApiServerOpts} from "../../src/api/index.js"; -import {dataToBytes} from "../../src/eth1/provider/utils.js"; import {defaultExecutionEngineHttpOpts} from "../../src/execution/engine/http.js"; import {ExecutionPayloadStatus, PayloadAttributes} from 
"../../src/execution/engine/interface.js"; +import {bytesToData, dataToBytes} from "../../src/execution/engine/utils.js"; import {initializeExecutionEngine} from "../../src/execution/index.js"; import {BeaconNode} from "../../src/index.js"; import {ClockEvent} from "../../src/util/clock.js"; @@ -311,8 +310,6 @@ describe("executionEngine / ExecutionEngineHttp", () => { api: {rest: {enabled: true} as BeaconRestApiServerOpts}, sync: {isSingleNode: true}, network: {allowPublishToZeroPeers: true, discv5: null}, - // Now eth deposit/merge tracker methods directly available on engine endpoints - eth1: {enabled: false, providerUrls: [engineRpcUrl], jwtSecretHex}, executionEngine: {urls: [engineRpcUrl], jwtSecretHex}, chain: {suggestedFeeRecipient: "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"}, }, diff --git a/packages/beacon-node/test/spec/presets/fork_choice.test.ts b/packages/beacon-node/test/spec/presets/fork_choice.test.ts index 33bfb100c7..dedb898934 100644 --- a/packages/beacon-node/test/spec/presets/fork_choice.test.ts +++ b/packages/beacon-node/test/spec/presets/fork_choice.test.ts @@ -38,7 +38,6 @@ import {BeaconChain, ChainEvent} from "../../../src/chain/index.js"; import {defaultChainOptions} from "../../../src/chain/options.js"; import {validateBlockDataColumnSidecars} from "../../../src/chain/validation/dataColumnSidecar.js"; import {ZERO_HASH_HEX} from "../../../src/constants/constants.js"; -import {Eth1ForBlockProductionDisabled} from "../../../src/eth1/index.js"; import {ExecutionPayloadStatus} from "../../../src/execution/engine/interface.js"; import {ExecutionEngineMockBackend} from "../../../src/execution/engine/mock.js"; import {getExecutionEngineFromBackend} from "../../../src/execution/index.js"; @@ -77,7 +76,6 @@ const forkChoiceTest = /** This is to track test's tickTime to be used in proposer boost */ let tickTime = 0; const clock = new ClockStopped(currentSlot); - const eth1 = new Eth1ForBlockProductionDisabled(); const executionEngineBackend = new ExecutionEngineMockBackend({ onlyPredefinedResponses: opts.onlyPredefinedResponses, genesisBlockHash: isExecutionStateType(anchorState) @@ -124,7 +122,6 @@ const forkChoiceTest = validatorMonitor: null, anchorState, isAnchorStateFinalized: true, - eth1, executionEngine, executionBuilder: undefined, } diff --git a/packages/beacon-node/test/unit-minimal/chain/genesis/genesis.test.ts b/packages/beacon-node/test/unit-minimal/chain/genesis/genesis.test.ts deleted file mode 100644 index 08567bf772..0000000000 --- a/packages/beacon-node/test/unit-minimal/chain/genesis/genesis.test.ts +++ /dev/null @@ -1,120 +0,0 @@ -import {describe, expect, it} from "vitest"; -import {PublicKey, SecretKey} from "@chainsafe/blst"; -import {toHexString} from "@chainsafe/ssz"; -import {config} from "@lodestar/config/default"; -import {DOMAIN_DEPOSIT, MAX_EFFECTIVE_BALANCE} from "@lodestar/params"; -import {ZERO_HASH, computeDomain, computeSigningRoot, interopSecretKey} from "@lodestar/state-transition"; -import {ValidatorIndex, phase0, ssz} from "@lodestar/types"; -import {ErrorAborted} from "@lodestar/utils"; -import {GenesisBuilder} from "../../../../src/chain/genesis/genesis.js"; -import {ZERO_HASH_HEX} from "../../../../src/constants/index.js"; -import {Eth1ProviderState, EthJsonRpcBlockRaw, IEth1Provider} from "../../../../src/eth1/interface.js"; -import {testLogger} from "../../../utils/logger.js"; - -describe("genesis builder", () => { - const logger = testLogger(); - const schlesiConfig = Object.assign({}, config, { - MIN_GENESIS_TIME: 
1587755000, - MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 4, - MIN_GENESIS_DELAY: 3600, - }); - - type MockData = {events: phase0.DepositEvent[]; blocks: EthJsonRpcBlockRaw[]}; - - function generateGenesisBuilderMockData(): MockData { - const events: phase0.DepositEvent[] = []; - const blocks: EthJsonRpcBlockRaw[] = []; - - for (let i = 0; i < schlesiConfig.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT; i++) { - const secretKey = interopSecretKey(i); - const publicKey = secretKey.toPublicKey(); - const event: phase0.DepositEvent = { - depositData: generateDeposit(i, secretKey, publicKey), - index: i, - blockNumber: i, - }; - events.push(event); - // All blocks satisfy MIN_GENESIS_TIME, so genesis will happen when the min validator count is reached - blocks.push({ - number: i.toString(16), - hash: ZERO_HASH_HEX, - timestamp: schlesiConfig.MIN_GENESIS_TIME + i.toString(16), - // Extra un-used data for this test - parentHash: "0x0", - totalDifficulty: "0x0", - }); - } - - return {events, blocks}; - } - - function getMockEth1Provider({events, blocks}: MockData, eth1Provider?: Partial): IEth1Provider { - return { - deployBlock: events[0].blockNumber, - getBlockNumber: async () => 2000, - getBlockByNumber: async (number) => blocks[number as number], - getBlocksByNumber: async (fromBlock, toBlock) => - blocks.filter((b) => parseInt(b.number) >= fromBlock && parseInt(b.number) <= toBlock), - getBlockByHash: async () => null, - getDepositEvents: async (fromBlock, toBlock) => - events.filter((e) => e.blockNumber >= fromBlock && e.blockNumber <= toBlock), - validateContract: async () => { - return; - }, - getState: () => Eth1ProviderState.ONLINE, - ...eth1Provider, - }; - } - - it("should build genesis state", async () => { - const mockData = generateGenesisBuilderMockData(); - const eth1Provider = getMockEth1Provider(mockData); - - const genesisBuilder = new GenesisBuilder({ - config: schlesiConfig, - eth1Provider, - logger, - maxBlocksPerPoll: 1, - }); - - const {state} = await genesisBuilder.waitForGenesis(); - - expect(state.validators.length).toBe(schlesiConfig.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT); - expect(toHexString(state.eth1Data.blockHash)).toBe( - mockData.blocks[schlesiConfig.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT - 1].hash - ); - }); - - it("should abort building genesis state", async () => { - const mockData = generateGenesisBuilderMockData(); - const controller = new AbortController(); - const eth1Provider = getMockEth1Provider(mockData, { - getDepositEvents: async (fromBlock, toBlock) => { - controller.abort(); - return mockData.events.filter((e) => e.blockNumber >= fromBlock && e.blockNumber <= toBlock); - }, - }); - - const genesisBuilder = new GenesisBuilder({ - config: schlesiConfig, - eth1Provider, - logger, - signal: controller.signal, - maxBlocksPerPoll: 1, - }); - - await expect(genesisBuilder.waitForGenesis()).rejects.toThrow(ErrorAborted); - }); -}); - -function generateDeposit(index: ValidatorIndex, secretKey: SecretKey, publicKey: PublicKey): phase0.DepositData { - const domain = computeDomain(DOMAIN_DEPOSIT, config.GENESIS_FORK_VERSION, ZERO_HASH); - const depositMessage = { - pubkey: publicKey.toBytes(), - withdrawalCredentials: Buffer.alloc(32, index), - amount: MAX_EFFECTIVE_BALANCE, - }; - const signingRoot = computeSigningRoot(ssz.phase0.DepositMessage, depositMessage, domain); - const signature = secretKey.sign(signingRoot); - return {...depositMessage, signature: signature.toBytes()}; -} diff --git a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts 
b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts index 69497cd05e..71deb87f27 100644 --- a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts +++ b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts @@ -245,10 +245,6 @@ describe("api/validator - produceBlockV3", () => { modules.chain.recomputeForkChoiceHead.mockReturnValue(generateProtoBlock({slot: headSlot})); modules.chain["opPool"].getSlashingsAndExits.mockReturnValue([[], [], [], []]); modules.chain["aggregatedAttestationPool"].getAttestationsForBlock.mockReturnValue([]); - modules.chain["eth1"].getEth1DataAndDeposits.mockResolvedValue({ - eth1Data: ssz.phase0.Eth1Data.defaultValue(), - deposits: [], - }); modules.chain["syncContributionAndProofPool"].getAggregate.mockReturnValue({ syncCommitteeBits: ssz.altair.SyncCommitteeBits.defaultValue(), syncCommitteeSignature: G2_POINT_AT_INFINITY, diff --git a/packages/beacon-node/test/unit/db/api/repository.test.ts b/packages/beacon-node/test/unit/db/api/repository.test.ts index 7702a72d70..b75d62e123 100644 --- a/packages/beacon-node/test/unit/db/api/repository.test.ts +++ b/packages/beacon-node/test/unit/db/api/repository.test.ts @@ -38,7 +38,7 @@ const TestSSZType = new ContainerType({ class TestRepository extends Repository { constructor(db: Db) { - super(config, db, Bucket.phase0_depositEvent, TestSSZType, "phase0_depositEvent"); + super(config, db, Bucket.phase0_exit, TestSSZType, "phase0_exit"); } } diff --git a/packages/beacon-node/test/unit/eth1/eth1DepositDataTracker.test.ts b/packages/beacon-node/test/unit/eth1/eth1DepositDataTracker.test.ts deleted file mode 100644 index 941677b615..0000000000 --- a/packages/beacon-node/test/unit/eth1/eth1DepositDataTracker.test.ts +++ /dev/null @@ -1,91 +0,0 @@ -import {MockInstance, afterEach, beforeEach, describe, expect, it, vi} from "vitest"; -import {config} from "@lodestar/config/default"; -import {TimeoutError} from "@lodestar/utils"; -import {BeaconDb} from "../../../src/db/beacon.js"; -import {Eth1DepositDataTracker} from "../../../src/eth1/eth1DepositDataTracker.js"; -import {defaultEth1Options} from "../../../src/eth1/options.js"; -import {Eth1Provider} from "../../../src/eth1/provider/eth1Provider.js"; -import {getMockedBeaconDb} from "../../mocks/mockedBeaconDb.js"; -import {testLogger} from "../../utils/logger.js"; - -describe("Eth1DepositDataTracker", () => { - const controller = new AbortController(); - - const logger = testLogger(); - const opts = {...defaultEth1Options, enabled: false}; - const signal = controller.signal; - const eth1Provider = new Eth1Provider(config, opts, signal, null); - let db: BeaconDb; - let eth1DepositDataTracker: Eth1DepositDataTracker; - let getBlocksByNumberStub: MockInstance; - let getDepositEventsStub: MockInstance; - - beforeEach(() => { - db = getMockedBeaconDb(); - eth1DepositDataTracker = new Eth1DepositDataTracker( - opts, - {config, db, logger, signal, metrics: null}, - eth1Provider - ); - vi.spyOn(Eth1DepositDataTracker.prototype as any, "getLastProcessedDepositBlockNumber").mockResolvedValue(0); - vi.spyOn(eth1DepositDataTracker["eth1DataCache"], "getHighestCachedBlockNumber").mockResolvedValue(0); - vi.spyOn(eth1DepositDataTracker["eth1DataCache"], "add").mockResolvedValue(void 0); - - vi.spyOn(eth1DepositDataTracker["depositsCache"], "getEth1DataForBlocks").mockResolvedValue([]); - vi.spyOn(eth1DepositDataTracker["depositsCache"], "add").mockResolvedValue(void 0); - vi.spyOn(eth1DepositDataTracker["depositsCache"], 
"getLowestDepositEventBlockNumber").mockResolvedValue(0); - - getBlocksByNumberStub = vi.spyOn(eth1Provider, "getBlocksByNumber"); - getDepositEventsStub = vi.spyOn(eth1Provider, "getDepositEvents"); - }); - - afterEach(() => { - vi.clearAllMocks(); - }); - - it("Should dynamically adjust blocks batch size", async () => { - let expectedSize = 1000; - expect(eth1DepositDataTracker["eth1GetBlocksBatchSizeDynamic"]).toBe(expectedSize); - - // If there are timeerrors or parse errors then batch size should reduce - getBlocksByNumberStub.mockRejectedValue(new TimeoutError("timeout error")); - for (let i = 0; i < 10; i++) { - expectedSize = Math.max(Math.floor(expectedSize / 2), 10); - await eth1DepositDataTracker["updateBlockCache"](3000).catch((_e) => void 0); - expect(eth1DepositDataTracker["eth1GetBlocksBatchSizeDynamic"]).toBe(expectedSize); - } - expect(expectedSize).toBe(10); - - getBlocksByNumberStub.mockResolvedValue([]); - // Should take a whole longer to get back to the orignal batch size - for (let i = 0; i < 100; i++) { - expectedSize = Math.min(expectedSize + 10, 1000); - await eth1DepositDataTracker["updateBlockCache"](3000); - expect(eth1DepositDataTracker["eth1GetBlocksBatchSizeDynamic"]).toBe(expectedSize); - } - expect(expectedSize).toBe(1000); - }); - - it("Should dynamically adjust logs batch size", async () => { - let expectedSize = 1000; - expect(eth1DepositDataTracker["eth1GetLogsBatchSizeDynamic"]).toBe(expectedSize); - - // If there are timeerrors or parse errors then batch size should reduce - getDepositEventsStub.mockRejectedValue(new TimeoutError("timeout error")); - for (let i = 0; i < 10; i++) { - expectedSize = Math.max(Math.floor(expectedSize / 2), 10); - await eth1DepositDataTracker["updateDepositCache"](3000).catch((_e) => void 0); - expect(eth1DepositDataTracker["eth1GetLogsBatchSizeDynamic"]).toBe(expectedSize); - } - expect(expectedSize).toBe(10); - - getDepositEventsStub.mockResolvedValue([]); - // Should take a whole longer to get back to the orignal batch size - for (let i = 0; i < 100; i++) { - expectedSize = Math.min(expectedSize + 10, 1000); - await eth1DepositDataTracker["updateDepositCache"](3000); - expect(eth1DepositDataTracker["eth1GetLogsBatchSizeDynamic"]).toBe(expectedSize); - } - expect(expectedSize).toBe(1000); - }); -}); diff --git a/packages/beacon-node/test/unit/eth1/utils/depositContract.test.ts b/packages/beacon-node/test/unit/eth1/utils/depositContract.test.ts deleted file mode 100644 index 260cb4fef4..0000000000 --- a/packages/beacon-node/test/unit/eth1/utils/depositContract.test.ts +++ /dev/null @@ -1,10 +0,0 @@ -import {describe, expect, it} from "vitest"; -import {parseDepositLog} from "../../../../src/eth1/utils/depositContract.js"; -import {goerliTestnetDepositEvents, goerliTestnetLogs} from "../../../utils/testnet.js"; - -describe("eth1 / util / depositContract", () => { - it("Should parse a raw deposit log", () => { - const depositEvents = goerliTestnetLogs.map((log) => parseDepositLog(log)); - expect(depositEvents).toEqual(goerliTestnetDepositEvents); - }); -}); diff --git a/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts b/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts deleted file mode 100644 index b1a8b9a9d7..0000000000 --- a/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts +++ /dev/null @@ -1,208 +0,0 @@ -import {describe, expect, it} from "vitest"; -import {createChainForkConfig} from "@lodestar/config"; -import {MAX_DEPOSITS, SLOTS_PER_EPOCH} from "@lodestar/params"; -import {phase0, ssz} 
from "@lodestar/types"; -import {verifyMerkleBranch} from "@lodestar/utils"; -import {DepositTree} from "../../../../src/db/repositories/depositDataRoot.js"; -import {Eth1ErrorCode} from "../../../../src/eth1/errors.js"; -import {DepositGetter, getDeposits, getDepositsWithProofs} from "../../../../src/eth1/utils/deposits.js"; -import {createCachedBeaconStateTest} from "../../../utils/cachedBeaconState.js"; -import {filterBy} from "../../../utils/db.js"; -import {expectRejectedWithLodestarError} from "../../../utils/errors.js"; -import {generateState} from "../../../utils/state.js"; - -describe("eth1 / util / deposits", () => { - describe("getDeposits", () => { - type TestCase = { - id: string; - depositCount: number; - eth1DepositIndex: number; - depositIndexes: number[]; - expectedReturnedIndexes?: number[]; - error?: Eth1ErrorCode; - postElectra?: boolean; - }; - - const testCases: TestCase[] = [ - { - id: "Return first deposit", - depositCount: 1, - eth1DepositIndex: 0, - depositIndexes: [0, 1, 2, 3], - expectedReturnedIndexes: [0], - }, - { - id: "Return second and third deposit", - depositCount: 3, - eth1DepositIndex: 1, - depositIndexes: [0, 1, 2, 3], - expectedReturnedIndexes: [1, 2], - }, - { - id: "No deposits to be included", - depositCount: 3, - eth1DepositIndex: 3, - depositIndexes: [0, 1, 2, 3], - expectedReturnedIndexes: [], - }, - { - id: "Limit deposits to MAX_DEPOSITS", - depositCount: 10 * MAX_DEPOSITS, - eth1DepositIndex: 0, - depositIndexes: Array.from({length: 10 * MAX_DEPOSITS}, (_, i) => i), - expectedReturnedIndexes: Array.from({length: MAX_DEPOSITS}, (_, i) => i), - }, - { - id: "Should throw if depositIndex > depositCount", - depositCount: 0, - eth1DepositIndex: 1, - depositIndexes: [], - error: Eth1ErrorCode.DEPOSIT_INDEX_TOO_HIGH, - }, - { - id: "Should throw if DB returns less deposits than expected", - depositCount: 1, - eth1DepositIndex: 0, - depositIndexes: [], - error: Eth1ErrorCode.NOT_ENOUGH_DEPOSITS, - }, - { - id: "Empty case", - depositCount: 0, - eth1DepositIndex: 0, - depositIndexes: [], - expectedReturnedIndexes: [], - }, - { - id: "No deposits to be included post Electra after deposit_requests_start_index", - depositCount: 2030, - eth1DepositIndex: 2025, - depositIndexes: Array.from({length: 2030}, (_, i) => i), - expectedReturnedIndexes: [], - postElectra: true, - }, - { - id: "Should return deposits post Electra before deposit_requests_start_index", - depositCount: 2022, - eth1DepositIndex: 2018, - depositIndexes: Array.from({length: 2022}, (_, i) => i), - expectedReturnedIndexes: [2018, 2019, 2020, 2021], - postElectra: true, - }, - { - id: "Should return deposits less than MAX_DEPOSITS post Electra before deposit_requests_start_index", - depositCount: 10 * MAX_DEPOSITS, - eth1DepositIndex: 0, - depositIndexes: Array.from({length: 10 * MAX_DEPOSITS}, (_, i) => i), - expectedReturnedIndexes: Array.from({length: MAX_DEPOSITS}, (_, i) => i), - postElectra: true, - }, - ]; - - const postElectraConfig = createChainForkConfig({ - ALTAIR_FORK_EPOCH: 1, - BELLATRIX_FORK_EPOCH: 2, - CAPELLA_FORK_EPOCH: 3, - DENEB_FORK_EPOCH: 4, - ELECTRA_FORK_EPOCH: 5, - }); - const postElectraSlot = postElectraConfig.ELECTRA_FORK_EPOCH * SLOTS_PER_EPOCH + 1; - - for (const testCase of testCases) { - const {id, depositIndexes, eth1DepositIndex, depositCount, expectedReturnedIndexes, error, postElectra} = - testCase; - it(id, async () => { - const state = postElectra - ? 
generateState({slot: postElectraSlot, eth1DepositIndex}, postElectraConfig) - : generateState({eth1DepositIndex}); - const cachedState = createCachedBeaconStateTest( - state, - postElectra ? postElectraConfig : createChainForkConfig({}) - ); - const eth1Data = generateEth1Data(depositCount); - const deposits = depositIndexes.map((index) => generateDepositEvent(index)); - const depositsGetter: DepositGetter = async (indexRange) => - filterBy(deposits, indexRange, (deposit) => deposit.index); - - const resultPromise = getDeposits(cachedState, eth1Data, depositsGetter); - - if (expectedReturnedIndexes) { - const result = await resultPromise; - expect(result.map((deposit) => deposit.index)).toEqual(expectedReturnedIndexes); - } else if (error != null) { - await expectRejectedWithLodestarError(resultPromise, error); - } else { - throw Error("Test case must have 'result' or 'error'"); - } - }); - } - }); - - describe("getDepositsWithProofs", () => { - it("return empty array if no pending deposits", () => { - const initialValues = [Buffer.alloc(32)]; - const depositRootTree = ssz.phase0.DepositDataRootList.toViewDU(initialValues); - const depositCount = 0; - const eth1Data = generateEth1Data(depositCount, depositRootTree); - - const deposits = getDepositsWithProofs([], depositRootTree, eth1Data); - expect(deposits).toEqual([]); - }); - - it("return deposits with valid proofs", () => { - const depositEvents = Array.from( - {length: 2}, - (_, index): phase0.DepositEvent => ({ - depositData: ssz.phase0.DepositData.defaultValue(), - blockNumber: index, - index, - }) - ); - - const depositRootTree = ssz.phase0.DepositDataRootList.defaultViewDU(); - for (const depositEvent of depositEvents) { - depositRootTree.push(ssz.phase0.DepositData.hashTreeRoot(depositEvent.depositData)); - } - const depositCount = depositEvents.length; - const eth1Data = generateEth1Data(depositCount, depositRootTree); - - const deposits = getDepositsWithProofs(depositEvents, depositRootTree, eth1Data); - - // Should not return all deposits - expect(deposits.length).toBe(2); - - // Verify each individual merkle root - for (const [index, deposit] of deposits.entries()) { - // Wrong merkle proof on deposit ${index} - expect( - verifyMerkleBranch( - ssz.phase0.DepositData.hashTreeRoot(deposit.data), - Array.from(deposit.proof).map((p) => p), - 33, - index, - eth1Data.depositRoot - ) - ).toBe(true); - } - }); - }); -}); - -function generateEth1Data(depositCount: number, depositRootTree?: DepositTree): phase0.Eth1Data { - return { - blockHash: Buffer.alloc(32), - depositRoot: depositRootTree ? 
depositRootTree.sliceTo(depositCount - 1).hashTreeRoot() : Buffer.alloc(32), - depositCount, - }; -} - -function generateDepositEvent(index: number, blockNumber = 0): phase0.DepositEvent { - const depositData = ssz.phase0.DepositData.defaultValue(); - depositData.amount = 32 * 10 * 9; - - return { - index, - blockNumber, - depositData, - }; -} diff --git a/packages/beacon-node/test/unit/eth1/utils/eth1Data.test.ts b/packages/beacon-node/test/unit/eth1/utils/eth1Data.test.ts deleted file mode 100644 index 89d43f3b69..0000000000 --- a/packages/beacon-node/test/unit/eth1/utils/eth1Data.test.ts +++ /dev/null @@ -1,275 +0,0 @@ -import {describe, expect, it} from "vitest"; -import {Root, phase0, ssz} from "@lodestar/types"; -import {toHex} from "@lodestar/utils"; -import {DepositTree} from "../../../../src/db/repositories/depositDataRoot.js"; -import {Eth1ErrorCode} from "../../../../src/eth1/errors.js"; -import {Eth1Block} from "../../../../src/eth1/interface.js"; -import { - getDepositRootByDepositCount, - getDepositsByBlockNumber, - getEth1DataForBlocks, -} from "../../../../src/eth1/utils/eth1Data.js"; -import {expectRejectedWithLodestarError} from "../../../utils/errors.js"; -import {iteratorFromArray} from "../../../utils/interator.js"; - -describe("eth1 / util / getEth1DataForBlocks", () => { - type TestCase = { - id: string; - blocks: Eth1Block[]; - deposits: phase0.DepositEvent[]; - depositRootTree: DepositTree; - lastProcessedDepositBlockNumber: number; - expectedEth1Data?: Partial[]; - error?: Eth1ErrorCode; - }; - - const testCases: (() => TestCase)[] = [ - () => { - // Result must contain all blocks from eth1Blocks, with backfilled eth1Data - const expectedEth1Data = [ - {blockNumber: 5, depositCount: 13}, - {blockNumber: 6, depositCount: 13}, - {blockNumber: 7, depositCount: 17}, - {blockNumber: 8, depositCount: 17}, - {blockNumber: 9, depositCount: 17}, - ]; - - // Consecutive block headers to be filled with eth1Data - const blocks = expectedEth1Data.map(({blockNumber}) => getMockBlock({blockNumber})); - - // Arbitrary list of consecutive non-uniform (blockNumber-wise) deposit roots - const deposits: phase0.DepositEvent[] = expectedEth1Data.map(({blockNumber, depositCount}) => - getMockDeposit({blockNumber, index: depositCount - 1}) - ); - const lastProcessedDepositBlockNumber = expectedEth1Data.at(-1)?.blockNumber as number; - - // Pre-fill the depositTree with roots for all deposits - const depositRootTree = ssz.phase0.DepositDataRootList.toViewDU( - Array.from({length: (deposits.at(-1)?.index as number) + 1}, (_, i) => Buffer.alloc(32, i)) - ); - - return { - id: "Normal case", - blocks, - deposits, - depositRootTree, - lastProcessedDepositBlockNumber, - expectedEth1Data, - }; - }, - - () => { - return { - id: "No deposits yet, should throw with NoDepositsForBlockRange", - blocks: [getMockBlock({blockNumber: 0})], - deposits: [], - depositRootTree: ssz.phase0.DepositDataRootList.defaultViewDU(), - lastProcessedDepositBlockNumber: 0, - error: Eth1ErrorCode.NO_DEPOSITS_FOR_BLOCK_RANGE, - }; - }, - - () => { - return { - id: "With deposits and no deposit roots, should throw with NotEnoughDepositRoots", - blocks: [getMockBlock({blockNumber: 0})], - deposits: [getMockDeposit({blockNumber: 0, index: 0})], - depositRootTree: ssz.phase0.DepositDataRootList.defaultViewDU(), - lastProcessedDepositBlockNumber: 0, - error: Eth1ErrorCode.NOT_ENOUGH_DEPOSIT_ROOTS, - }; - }, - - () => { - return { - id: "Empty case", - blocks: [], - deposits: [], - depositRootTree: 
ssz.phase0.DepositDataRootList.defaultViewDU(), - lastProcessedDepositBlockNumber: 0, - expectedEth1Data: [], - }; - }, - ]; - - for (const testCase of testCases) { - const {id, blocks, deposits, depositRootTree, lastProcessedDepositBlockNumber, expectedEth1Data, error} = - testCase(); - it(id, async () => { - const eth1DatasPromise = getEth1DataForBlocks( - blocks, - // Simulate a descending stream reading from DB - iteratorFromArray(deposits.reverse()), - depositRootTree, - lastProcessedDepositBlockNumber - ); - - if (expectedEth1Data) { - const eth1Datas = await eth1DatasPromise; - const eth1DatasPartial = eth1Datas.map(({blockNumber, depositCount}) => ({blockNumber, depositCount})); - expect(eth1DatasPartial).toEqual(expectedEth1Data); - } else if (error != null) { - await expectRejectedWithLodestarError(eth1DatasPromise, error); - } else { - throw Error("Test case must have 'expectedEth1Data' or 'error'"); - } - }); - } -}); - -describe("eth1 / util / getDepositsByBlockNumber", () => { - type TestCase = { - id: string; - fromBlock: number; - toBlock: number; - deposits: phase0.DepositEvent[]; - expectedResult: phase0.DepositEvent[]; - }; - - const testCases: (() => TestCase)[] = [ - () => { - const deposit0 = getMockDeposit({blockNumber: 0, index: 0}); - return { - id: "Collect deposit at block 0 in range [1,2]", - fromBlock: 1, - toBlock: 2, - deposits: [deposit0], - expectedResult: [deposit0], - }; - }, - () => { - const deposit1 = getMockDeposit({blockNumber: 1, index: 0}); - return { - id: "Collect deposit at block 1 in range [1,2]", - fromBlock: 1, - toBlock: 2, - deposits: [deposit1], - expectedResult: [deposit1], - }; - }, - () => { - const deposit3 = getMockDeposit({blockNumber: 3, index: 0}); - return { - id: "Don't collect deposit at block 3 in range [1,2]", - fromBlock: 1, - toBlock: 2, - deposits: [deposit3], - expectedResult: [], - }; - }, - () => { - const deposit0 = getMockDeposit({blockNumber: 0, index: 0}); - const deposit3 = getMockDeposit({blockNumber: 3, index: 4}); - return { - id: "Collect multiple deposits", - fromBlock: 1, - toBlock: 4, - deposits: [deposit0, deposit3], - expectedResult: [deposit0, deposit3], - }; - }, - () => { - return { - id: "Empty case", - fromBlock: 0, - toBlock: 0, - deposits: [], - expectedResult: [], - }; - }, - ]; - - for (const testCase of testCases) { - const {id, fromBlock, toBlock, deposits, expectedResult} = testCase(); - it(id, async () => { - const result = await getDepositsByBlockNumber( - fromBlock, - toBlock, // Simulate a descending stream reading from DB - iteratorFromArray(deposits.reverse()) - ); - expect(result).toEqual(expectedResult); - }); - } -}); - -describe("eth1 / util / getDepositRootByDepositCount", () => { - type TestCase = { - id: string; - depositCounts: number[]; - depositRootTree: DepositTree; - expectedMap: Map; - }; - - const fullRootMap = new Map(); - const fullDepositRootTree = ssz.phase0.DepositDataRootList.defaultViewDU(); - for (let i = 0; i < 10; i++) { - fullDepositRootTree.push(Buffer.alloc(32, i)); - fullRootMap.set(fullDepositRootTree.length, fullDepositRootTree.hashTreeRoot()); - } - - const testCases: (() => TestCase)[] = [ - () => { - return { - id: "Roots are computed correctly, all values match", - depositCounts: Array.from(fullRootMap.keys()), - depositRootTree: fullDepositRootTree, - expectedMap: fullRootMap, - }; - }, - () => { - const depositCounts = Array.from(fullRootMap.keys()).filter((n) => n % 2); - const expectedMap = new Map(); - for (const depositCount of depositCounts) { - 
const depositRoot = fullRootMap.get(depositCount); - if (depositRoot) expectedMap.set(depositCount, depositRoot); - } - return { - id: "Roots are computed correctly, sparse values match", - depositCounts, - depositRootTree: fullDepositRootTree, - expectedMap, - }; - }, - () => { - const emptyTree = ssz.phase0.DepositDataRootList.defaultViewDU(); - return { - id: "Empty case", - depositCounts: [], - depositRootTree: emptyTree, - expectedMap: new Map(), - }; - }, - ]; - - for (const testCase of testCases) { - const {id, depositCounts, depositRootTree, expectedMap} = testCase(); - it(id, () => { - const map = getDepositRootByDepositCount(depositCounts, depositRootTree); - expect(renderDepositRootByDepositCount(map)).toEqual(renderDepositRootByDepositCount(expectedMap)); - }); - } -}); - -function renderDepositRootByDepositCount(map: Map): Record { - const data: Record = {}; - for (const [key, root] of Object.entries(map)) { - data[key] = toHex(root); - } - return data; -} - -function getMockBlock({blockNumber}: {blockNumber: number}): Eth1Block { - return { - blockNumber, - blockHash: Buffer.alloc(32, blockNumber), - timestamp: blockNumber, - }; -} - -function getMockDeposit({blockNumber, index}: {blockNumber: number; index: number}): phase0.DepositEvent { - return { - blockNumber, - index, - depositData: {} as phase0.DepositData, // Not used - }; -} diff --git a/packages/beacon-node/test/unit/eth1/utils/eth1DepositEvent.test.ts b/packages/beacon-node/test/unit/eth1/utils/eth1DepositEvent.test.ts deleted file mode 100644 index a1de0fa748..0000000000 --- a/packages/beacon-node/test/unit/eth1/utils/eth1DepositEvent.test.ts +++ /dev/null @@ -1,46 +0,0 @@ -import {describe, expect, it} from "vitest"; -import {assertConsecutiveDeposits} from "../../../../src/eth1/utils/eth1DepositEvent.js"; - -describe("eth1 / util / assertConsecutiveDeposits", () => { - const testCases: { - id: string; - ok: boolean; - depositEvents: {index: number}[]; - }[] = [ - { - id: "sequential deposits", - ok: true, - depositEvents: [{index: 4}, {index: 5}, {index: 6}], - }, - { - id: "non sequential deposits", - ok: false, - depositEvents: [{index: 4}, {index: 7}, {index: 9}], - }, - { - id: "sequential descending deposits", - ok: false, - depositEvents: [{index: 6}, {index: 5}, {index: 4}], - }, - { - id: "single deposit", - ok: true, - depositEvents: [{index: 4}], - }, - { - id: "empty array", - ok: true, - depositEvents: [], - }, - ]; - - for (const {id, ok, depositEvents} of testCases) { - it(id, () => { - if (ok) { - assertConsecutiveDeposits(depositEvents); - } else { - expect(() => assertConsecutiveDeposits(depositEvents)).toThrow(); - } - }); - } -}); diff --git a/packages/beacon-node/test/unit/eth1/utils/eth1Vote.test.ts b/packages/beacon-node/test/unit/eth1/utils/eth1Vote.test.ts deleted file mode 100644 index d78eee1e57..0000000000 --- a/packages/beacon-node/test/unit/eth1/utils/eth1Vote.test.ts +++ /dev/null @@ -1,171 +0,0 @@ -import {describe, expect, it} from "vitest"; -import {ChainForkConfig} from "@lodestar/config"; -import {config} from "@lodestar/config/default"; -import {BeaconStateAllForks} from "@lodestar/state-transition"; -import {phase0, ssz} from "@lodestar/types"; -import { - Eth1DataGetter, - getEth1VotesToConsider, - pickEth1Vote, - votingPeriodStartTime, -} from "../../../../src/eth1/utils/eth1Vote.js"; -import {filterBy} from "../../../utils/db.js"; -import {generateState} from "../../../utils/state.js"; - -describe("eth1 / util / eth1Vote", () => { - function generateEth1Vote(i: number): 
phase0.Eth1Data { - return { - blockHash: Buffer.alloc(32, i), - depositRoot: Buffer.alloc(32, i), - depositCount: i, - }; - } - - describe("pickEth1Vote", () => { - // Function array to scope votes in each test case defintion - const testCases: (() => { - id: string; - eth1DataVotesInState: phase0.Eth1Data[]; - votesToConsider: phase0.Eth1Data[]; - expectedEth1Vote: phase0.Eth1Data; - })[] = [ - () => { - const vote = generateEth1Vote(0); - return { - id: "basic case, pick the only valid vote", - eth1DataVotesInState: [vote], - votesToConsider: [vote], - expectedEth1Vote: vote, - }; - }, - () => { - const vote = generateEth1Vote(0); - const voteDefault = generateEth1Vote(1); - return { - id: "no valid votes in state, pick the default first from votesToConsider", - eth1DataVotesInState: [vote], - votesToConsider: [voteDefault], - expectedEth1Vote: voteDefault, - }; - }, - () => { - const vote = generateEth1Vote(0); - return { - id: "no votes in state", - eth1DataVotesInState: [], - votesToConsider: [vote], - expectedEth1Vote: vote, - }; - }, - () => { - const vote1 = generateEth1Vote(0); - const vote2 = generateEth1Vote(1); - const vote3 = generateEth1Vote(2); - return { - id: "pick most frequent vote", - eth1DataVotesInState: [vote1, vote2, vote2, vote2, vote3], - votesToConsider: [vote1, vote2, vote3], - expectedEth1Vote: vote2, - }; - }, - () => { - const vote1 = generateEth1Vote(0); - const vote2 = generateEth1Vote(0); - return { - id: "tiebreak", - eth1DataVotesInState: [vote1, vote2], - votesToConsider: [vote1, vote2], - expectedEth1Vote: vote1, - }; - }, - ]; - - for (const testCase of testCases) { - const {id, eth1DataVotesInState, votesToConsider, expectedEth1Vote} = testCase(); - it(id, async () => { - const state = generateState({slot: 5, eth1DataVotes: eth1DataVotesInState}); - const eth1Vote = pickEth1Vote(state, votesToConsider); - expect(ssz.phase0.Eth1Data.toJson(eth1Vote)).toEqual(ssz.phase0.Eth1Data.toJson(expectedEth1Vote)); - }); - } - }); - - describe("getEth1VotesToConsider", () => { - // Function array to scope votes in each test case defintion - const testCases: (() => { - id: string; - state: BeaconStateAllForks; - eth1Datas: Eth1DataWithTimestamp[]; - expectedVotesToConsider: phase0.Eth1Data[]; - })[] = [ - () => { - const state = generateState({eth1Data: generateEth1Vote(0)}); - const timestampInRange = getTimestampInRange(config, state); - const vote1 = getEth1DataBlock({depositCount: 1, timestamp: 0}); - const vote2 = getEth1DataBlock({depositCount: 1, timestamp: timestampInRange}); - const vote3 = getEth1DataBlock({depositCount: 1, timestamp: Infinity}); - return { - id: "Only consider blocks with a timestamp in range", - state, - eth1Datas: [vote1, vote2, vote3].map(getEth1DataBlock), - expectedVotesToConsider: [vote2], - }; - }, - () => { - const state = generateState({eth1Data: generateEth1Vote(11)}); - const timestampInRange = getTimestampInRange(config, state); - const vote1 = getEth1DataBlock({depositCount: 10, timestamp: timestampInRange}); - const vote2 = getEth1DataBlock({depositCount: 12, timestamp: timestampInRange}); - return { - id: "Ensure first vote is depositCount < current state is not considered", - state, - eth1Datas: [vote1, vote2].map(getEth1DataBlock), - expectedVotesToConsider: [vote2], - }; - }, - ]; - - for (const testCase of testCases) { - const {id, state, eth1Datas, expectedVotesToConsider} = testCase(); - it(`get votesToConsider: ${id}`, async () => { - const eth1DataGetter: Eth1DataGetter = async ({timestampRange}) => - 
filterBy(eth1Datas, timestampRange, (eth1Data) => eth1Data.timestamp); - - const votesToConsider = await getEth1VotesToConsider(config, state, eth1DataGetter); - - expect(votesToConsider.map((eth1Data) => ssz.phase0.Eth1Data.toJson(eth1Data))).toEqual( - expectedVotesToConsider.map((eth1Data) => ssz.phase0.Eth1Data.toJson(eth1Data)) - ); - }); - } - }); -}); - -interface Eth1DataWithTimestamp extends phase0.Eth1Data { - timestamp: number; -} - -/** - * Util: Fill partial eth1DataBlock with mock data - * @param eth1DataBlock - */ -function getEth1DataBlock(eth1DataBlock: Partial): Eth1DataWithTimestamp { - return { - blockHash: Buffer.alloc(32), - depositRoot: Buffer.alloc(32), - depositCount: 0, - timestamp: 0, - ...eth1DataBlock, - }; -} - -/** - * Util: Get a mock timestamp that passes isCandidateBlock validation - * @param config - * @param state - */ -function getTimestampInRange(config: ChainForkConfig, state: BeaconStateAllForks): number { - const {SECONDS_PER_ETH1_BLOCK, ETH1_FOLLOW_DISTANCE} = config; - const periodStart = votingPeriodStartTime(config, state); - return periodStart - SECONDS_PER_ETH1_BLOCK * ETH1_FOLLOW_DISTANCE; -} diff --git a/packages/beacon-node/test/unit/eth1/utils/groupDepositEventsByBlock.test.ts b/packages/beacon-node/test/unit/eth1/utils/groupDepositEventsByBlock.test.ts deleted file mode 100644 index c855701ae6..0000000000 --- a/packages/beacon-node/test/unit/eth1/utils/groupDepositEventsByBlock.test.ts +++ /dev/null @@ -1,34 +0,0 @@ -import {describe, expect, it} from "vitest"; -import {phase0} from "@lodestar/types"; -import {groupDepositEventsByBlock} from "../../../../src/eth1/utils/groupDepositEventsByBlock.js"; - -describe("eth1 / util / groupDepositEventsByBlock", () => { - it("should return deposit events by block sorted by index", () => { - const depositData = { - amount: 0, - signature: Buffer.alloc(96), - withdrawalCredentials: Buffer.alloc(32), - pubkey: Buffer.alloc(48), - }; - const depositEvents: phase0.DepositEvent[] = [ - {blockNumber: 1, index: 0, depositData}, - {blockNumber: 2, index: 2, depositData}, - {blockNumber: 2, index: 1, depositData}, - {blockNumber: 3, index: 4, depositData}, - {blockNumber: 3, index: 3, depositData}, - ]; - const blockEvents = groupDepositEventsByBlock(depositEvents); - - // Keep only the relevant info of the result - const blockEventsIndexOnly = blockEvents.map((blockEvent) => ({ - blockNumber: blockEvent.blockNumber, - deposits: blockEvent.depositEvents.map((deposit) => deposit.index), - })); - - expect(blockEventsIndexOnly).toEqual([ - {blockNumber: 1, deposits: [0]}, - {blockNumber: 2, deposits: [1, 2]}, - {blockNumber: 3, deposits: [3, 4]}, - ]); - }); -}); diff --git a/packages/beacon-node/test/unit/eth1/utils/optimizeNextBlockDiffForGenesis.test.ts b/packages/beacon-node/test/unit/eth1/utils/optimizeNextBlockDiffForGenesis.test.ts deleted file mode 100644 index 38a4e6fa83..0000000000 --- a/packages/beacon-node/test/unit/eth1/utils/optimizeNextBlockDiffForGenesis.test.ts +++ /dev/null @@ -1,55 +0,0 @@ -import {describe, expect, it} from "vitest"; -import {Eth1Block} from "../../../../src/eth1/interface.js"; -import {optimizeNextBlockDiffForGenesis} from "../../../../src/eth1/utils/optimizeNextBlockDiffForGenesis.js"; - -describe("eth1 / utils / optimizeNextBlockDiffForGenesis", () => { - it("should return optimized block diff to find genesis time", () => { - const params = { - MIN_GENESIS_TIME: 1578009600, - GENESIS_DELAY: 172800, - SECONDS_PER_ETH1_BLOCK: 14, - }; - const initialTimeDiff = 
params.GENESIS_DELAY * 2; - let lastFetchedBlock: Eth1Block = { - blockHash: Buffer.alloc(32, 0), - blockNumber: 100000, - timestamp: params.MIN_GENESIS_TIME - initialTimeDiff, - }; - - const diffRecord: {blockDiff: number; number: number}[] = []; - for (let i = 0; i < 100; i++) { - const blockDiff = optimizeNextBlockDiffForGenesis(lastFetchedBlock, params); - - // Simulate fetching the next block - lastFetchedBlock = { - blockHash: Buffer.alloc(32, 0), - blockNumber: lastFetchedBlock.blockNumber + blockDiff, - timestamp: lastFetchedBlock.timestamp + blockDiff * params.SECONDS_PER_ETH1_BLOCK, - }; - - if (lastFetchedBlock.timestamp > params.MIN_GENESIS_TIME - params.GENESIS_DELAY) { - break; - } - diffRecord.push({number: lastFetchedBlock.blockNumber, blockDiff}); - } - - // Make sure the returned diffs converge to genesis time fast - expect(diffRecord).toEqual([ - {number: 106171, blockDiff: 6171}, - {number: 109256, blockDiff: 3085}, - {number: 110799, blockDiff: 1543}, - {number: 111570, blockDiff: 771}, - {number: 111956, blockDiff: 386}, - {number: 112149, blockDiff: 193}, - {number: 112245, blockDiff: 96}, - {number: 112293, blockDiff: 48}, - {number: 112317, blockDiff: 24}, - {number: 112329, blockDiff: 12}, - {number: 112335, blockDiff: 6}, - {number: 112338, blockDiff: 3}, - {number: 112340, blockDiff: 2}, - {number: 112341, blockDiff: 1}, - {number: 112342, blockDiff: 1}, - ]); - }); -}); diff --git a/packages/beacon-node/test/unit/eth1/hexEncoding.test.ts b/packages/beacon-node/test/unit/execution/engine/hexEncoding.test.ts similarity index 95% rename from packages/beacon-node/test/unit/eth1/hexEncoding.test.ts rename to packages/beacon-node/test/unit/execution/engine/hexEncoding.test.ts index bb2782f2f7..27985c0b5c 100644 --- a/packages/beacon-node/test/unit/eth1/hexEncoding.test.ts +++ b/packages/beacon-node/test/unit/execution/engine/hexEncoding.test.ts @@ -6,9 +6,9 @@ import { quantityToBigint, quantityToBytes, quantityToNum, -} from "../../../src/eth1/provider/utils.js"; +} from "../../../../src/execution/engine/utils.js"; -describe("eth1 / hex encoding", () => { +describe("execution / engine / hex encoding", () => { describe("QUANTITY", () => { const testCases: { quantity: QUANTITY; diff --git a/packages/beacon-node/test/unit/eth1/jwt.test.ts b/packages/beacon-node/test/unit/execution/engine/jwt.test.ts similarity index 94% rename from packages/beacon-node/test/unit/eth1/jwt.test.ts rename to packages/beacon-node/test/unit/execution/engine/jwt.test.ts index c96344b37c..b72a2d9511 100644 --- a/packages/beacon-node/test/unit/eth1/jwt.test.ts +++ b/packages/beacon-node/test/unit/execution/engine/jwt.test.ts @@ -1,7 +1,7 @@ import {describe, expect, it} from "vitest"; -import {decodeJwtToken, encodeJwtToken} from "../../../src/eth1/provider/jwt.js"; +import {decodeJwtToken, encodeJwtToken} from "../../../../src/execution/engine/jwt.js"; -describe("ExecutionEngine / jwt", () => { +describe("execution / engine / jwt", () => { it("encode/decode correctly", () => { const jwtSecret = Buffer.from(Array.from({length: 32}, () => Math.round(Math.random() * 255))); const claim = {iat: Math.floor(new Date().getTime() / 1000)}; diff --git a/packages/beacon-node/test/unit/execution/engine/utils.test.ts b/packages/beacon-node/test/unit/execution/engine/utils.test.ts index 098ed489aa..08f3b34a0d 100644 --- a/packages/beacon-node/test/unit/execution/engine/utils.test.ts +++ b/packages/beacon-node/test/unit/execution/engine/utils.test.ts @@ -1,6 +1,6 @@ import {describe, expect, it} from 
"vitest"; import {ErrorAborted, FetchError} from "@lodestar/utils"; -import {ErrorJsonRpcResponse, HttpRpcError} from "../../../../src/eth1/provider/jsonRpcHttpClient.js"; +import {ErrorJsonRpcResponse, HttpRpcError} from "../../../../src/execution/engine/jsonRpcHttpClient.js"; import { HTTP_CONNECTION_ERROR_CODES, HTTP_FATAL_ERROR_CODES, diff --git a/packages/beacon-node/test/unit/executionEngine/http.test.ts b/packages/beacon-node/test/unit/executionEngine/http.test.ts index d50519f41e..86e4f39254 100644 --- a/packages/beacon-node/test/unit/executionEngine/http.test.ts +++ b/packages/beacon-node/test/unit/executionEngine/http.test.ts @@ -2,14 +2,13 @@ import {fastify} from "fastify"; import {afterAll, beforeAll, describe, expect, it} from "vitest"; import {Logger} from "@lodestar/logger"; import {ForkName} from "@lodestar/params"; -import {RpcPayload} from "../../../src/eth1/interface.js"; -import {numToQuantity} from "../../../src/eth1/provider/utils.js"; import {defaultExecutionEngineHttpOpts} from "../../../src/execution/engine/http.js"; import { parseExecutionPayload, serializeExecutionPayload, serializeExecutionPayloadBody, } from "../../../src/execution/engine/types.js"; +import {RpcPayload, numToQuantity} from "../../../src/execution/engine/utils.js"; import {IExecutionEngine, initializeExecutionEngine} from "../../../src/execution/index.js"; describe("ExecutionEngine / http", () => { diff --git a/packages/beacon-node/test/unit/executionEngine/httpRetry.test.ts b/packages/beacon-node/test/unit/executionEngine/httpRetry.test.ts index 85340bb0d3..69f0a9ef29 100644 --- a/packages/beacon-node/test/unit/executionEngine/httpRetry.test.ts +++ b/packages/beacon-node/test/unit/executionEngine/httpRetry.test.ts @@ -3,8 +3,8 @@ import {afterAll, beforeAll, describe, expect, it} from "vitest"; import {fromHexString} from "@chainsafe/ssz"; import {Logger} from "@lodestar/logger"; import {ForkName} from "@lodestar/params"; -import {bytesToData, numToQuantity} from "../../../src/eth1/provider/utils.js"; import {defaultExecutionEngineHttpOpts} from "../../../src/execution/engine/http.js"; +import {bytesToData, numToQuantity} from "../../../src/execution/engine/utils.js"; import {IExecutionEngine, PayloadAttributes, initializeExecutionEngine} from "../../../src/execution/index.js"; describe("ExecutionEngine / http ", () => { diff --git a/packages/beacon-node/test/utils/networkWithMockDb.ts b/packages/beacon-node/test/utils/networkWithMockDb.ts index 5194713fd3..3cbf1563d6 100644 --- a/packages/beacon-node/test/utils/networkWithMockDb.ts +++ b/packages/beacon-node/test/utils/networkWithMockDb.ts @@ -3,7 +3,6 @@ import {ChainForkConfig, createBeaconConfig} from "@lodestar/config"; import {ssz} from "@lodestar/types"; import {sleep} from "@lodestar/utils"; import {BeaconChain} from "../../src/chain/chain.js"; -import {Eth1ForBlockProductionDisabled} from "../../src/eth1/index.js"; import {ExecutionEngineDisabled} from "../../src/execution/index.js"; import {ArchiveMode} from "../../src/index.js"; import {GossipHandlers, Network, NetworkInitModules, getReqRespHandlers} from "../../src/network/index.js"; @@ -76,7 +75,6 @@ export async function getNetworkForTest( validatorMonitor: null, anchorState: createCachedBeaconStateTest(state, beaconConfig), isAnchorStateFinalized: true, - eth1: new Eth1ForBlockProductionDisabled(), executionEngine: new ExecutionEngineDisabled(), } ); diff --git a/packages/beacon-node/test/utils/node/beacon.ts b/packages/beacon-node/test/utils/node/beacon.ts index 
472749ab17..264b1df14c 100644 --- a/packages/beacon-node/test/utils/node/beacon.ts +++ b/packages/beacon-node/test/utils/node/beacon.ts @@ -17,7 +17,7 @@ import {BeaconNode} from "../../../src/index.js"; import {defaultNetworkOptions} from "../../../src/network/options.js"; import {IBeaconNodeOptions, defaultOptions} from "../../../src/node/options.js"; import {InteropStateOpts} from "../../../src/node/utils/interop/state.js"; -import {initDevState, writeDeposits} from "../../../src/node/utils/state.js"; +import {initDevState} from "../../../src/node/utils/state.js"; import {testLogger} from "../logger.js"; export async function getDevBeaconNode( @@ -45,13 +45,10 @@ export async function getDevBeaconNode( let anchorState = opts.anchorState; if (!anchorState) { - const {state, deposits} = initDevState(config, validatorCount, opts); - anchorState = state; + anchorState = initDevState(config, validatorCount, opts); - // Is it necessary to persist deposits and genesis block? - await writeDeposits(db, deposits); const block = config.getForkTypes(GENESIS_SLOT).SignedBeaconBlock.defaultValue(); - block.message.stateRoot = state.hashTreeRoot(); + block.message.stateRoot = anchorState.hashTreeRoot(); await db.blockArchive.add(block); if (config.getForkSeq(GENESIS_SLOT) >= ForkSeq.deneb) { @@ -69,7 +66,6 @@ export async function getDevBeaconNode( // dev defaults that we wish, especially for the api options { db: {name: tmpDir.name}, - eth1: {enabled: false}, api: {rest: {api: ["beacon", "config", "events", "node", "validator"], port: 19596}}, metrics: {enabled: false}, network: { diff --git a/packages/beacon-node/test/utils/runEl.ts b/packages/beacon-node/test/utils/runEl.ts index 1614a899b0..f88efdaed2 100644 --- a/packages/beacon-node/test/utils/runEl.ts +++ b/packages/beacon-node/test/utils/runEl.ts @@ -1,10 +1,8 @@ import {spawn} from "node:child_process"; import fs from "node:fs"; import net from "node:net"; -import {ChainConfig} from "@lodestar/config"; -import {sleep} from "@lodestar/utils"; -import {ZERO_HASH} from "../../src/constants/index.js"; -import {Eth1Provider} from "../../src/index.js"; +import {fromHex, sleep} from "@lodestar/utils"; +import {JsonRpcHttpClient} from "../../src/execution/engine/jsonRpcHttpClient.js"; import {shell} from "../sim/shell.js"; let txRpcId = 1; @@ -87,18 +85,20 @@ async function getGenesisBlockHash( {providerUrl, jwtSecretHex}: {providerUrl: string; jwtSecretHex?: string}, signal: AbortSignal ): Promise { - const eth1Provider = new Eth1Provider( - {DEPOSIT_CONTRACT_ADDRESS: ZERO_HASH} as Partial as ChainConfig, - {providerUrls: [providerUrl], jwtSecretHex}, - signal - ); + const rpc = new JsonRpcHttpClient([providerUrl], { + signal, + jwtSecret: jwtSecretHex ? 
fromHex(jwtSecretHex) : undefined, + }); // Need to run multiple tries because nethermind sometimes is not yet ready and throws error // of connection refused while fetching genesis block for (let i = 1; i <= 60; i++) { console.log(`fetching genesisBlock hash, try: ${i}`); try { - const genesisBlock = await eth1Provider.getBlockByNumber(0); + const genesisBlock = await rpc.fetch<{hash: string}>({ + method: "eth_getBlockByNumber", + params: ["0x0", false], + }); console.log({genesisBlock}); if (!genesisBlock) { throw Error("No genesis block available"); diff --git a/packages/beacon-node/test/utils/testnet.ts b/packages/beacon-node/test/utils/testnet.ts deleted file mode 100644 index 95abc185b6..0000000000 --- a/packages/beacon-node/test/utils/testnet.ts +++ /dev/null @@ -1,51 +0,0 @@ -import {fromHexString} from "@chainsafe/ssz"; -import {ChainForkConfig, createChainForkConfig} from "@lodestar/config"; -import {chainConfig} from "@lodestar/config/default"; -import {phase0} from "@lodestar/types"; - -/** Generic testnet data taken from the Medalla testnet */ -export const medallaTestnetConfig = { - depositBlock: 3085928, - // Optimized blocks for quick testing - blockWithDepositActivity: 3124889, -}; - -/** Testnet specs for the Medalla testnet */ -export function getTestnetConfig(): ChainForkConfig { - const config = createChainForkConfig(chainConfig); - config.DEPOSIT_NETWORK_ID = 5; - config.DEPOSIT_CONTRACT_ADDRESS = Buffer.from("07b39F4fDE4A38bACe212b546dAc87C58DfE3fDC", "hex"); - config.MIN_GENESIS_TIME = 1596546000; - config.GENESIS_DELAY = 172800; - config.GENESIS_FORK_VERSION = Buffer.from("00000001", "hex"); - return config; -} - -/** Goerli deposit log for the Medalla testnet */ -export const goerliTestnetLogs = [ - { - // Raw unparsed log index 6833 - blockNumber: 3124930, - txHash: "0x9662b35ea4128fafe8185f8b4b0b890f72009d31e9d65a8f2ad5712f74910644", - topics: ["0x649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5"], - data: "0x00000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001400000000000000000000000000000000000000000000000000000000000000180000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000308214eabc827a4deaed78c0bf3f91d81b57968041b5d7c975c716641ccfac7aa4e11e3354a357b1f40637e282fd66403500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000bb991061d2545c75e788b93f3425b03b05f0d2aae8e97da30d7d04886b9eb700000000000000000000000000000000000000000000000000000000000000080040597307000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006099cb82bc69b4111d1a828963f0316ec9aa38c4e9e041a8afec86cd20dfe9a590999845bf01d4689f3bbe3df54e48695e081f1216027b577c7fccf6ab0a4fcc75faf8009c6b55e518478139f604f542d138ae3bc34bad01ee6002006d64c4ff820000000000000000000000000000000000000000000000000000000000000008b11a000000000000000000000000000000000000000000000000000000000000", - }, -]; - -/** Goerli parsed deposit event for the Medalla testnet */ -export const goerliTestnetDepositEvents: phase0.DepositEvent[] = [ - { - blockNumber: 3124930, - index: 6833, - depositData: { - pubkey: fromHexString( - "8214EABC827A4DEAED78C0BF3F91D81B57968041B5D7C975C716641CCFAC7AA4E11E3354A357B1F40637E282FD664035" - ), - withdrawalCredentials: 
fromHexString("00BB991061D2545C75E788B93F3425B03B05F0D2AAE8E97DA30D7D04886B9EB7"), - amount: 32e9, - signature: fromHexString( - "99CB82BC69B4111D1A828963F0316EC9AA38C4E9E041A8AFEC86CD20DFE9A590999845BF01D4689F3BBE3DF54E48695E081F1216027B577C7FCCF6AB0A4FCC75FAF8009C6B55E518478139F604F542D138AE3BC34BAD01EE6002006D64C4FF82" - ), - }, - }, -]; diff --git a/packages/cli/src/cmds/beacon/handler.ts b/packages/cli/src/cmds/beacon/handler.ts index 825e7fa8f0..a502b261ca 100644 --- a/packages/cli/src/cmds/beacon/handler.ts +++ b/packages/cli/src/cmds/beacon/handler.ts @@ -10,7 +10,7 @@ import {ACTIVE_PRESET, PresetName} from "@lodestar/params"; import {ErrorAborted, bytesToInt, formatBytes} from "@lodestar/utils"; import {ProcessShutdownCallback} from "@lodestar/validator"; import {BeaconNodeOptions, getBeaconConfigFromArgs} from "../../config/index.js"; -import {getNetworkBootnodes, getNetworkData, isKnownNetworkName, readBootnodes} from "../../networks/index.js"; +import {getNetworkBootnodes, isKnownNetworkName, readBootnodes} from "../../networks/index.js"; import {GlobalArgs, parseBeaconNodeArgs} from "../../options/index.js"; import {LogArgs} from "../../options/logOptions.js"; import { @@ -71,13 +71,11 @@ export async function beaconHandler(args: BeaconArgs & GlobalArgs): Promise { if (args.forceCheckpointSync && !(args.checkpointState || args.checkpointSyncUrl || args.unsafeCheckpointState)) { throw new Error("Forced checkpoint sync without specifying a checkpointState or checkpointSyncUrl"); @@ -333,9 +328,7 @@ export async function initBeaconState( return {anchorState, isFinalized}; } - // Only place we will not bother checking isWithinWeakSubjectivityPeriod as forceGenesis passed by user - const anchorState = await initStateFromEth1({config: chainForkConfig, db, logger, opts: options.eth1, signal}); - return {anchorState, isFinalized: true}; + throw Error("Failed to initialize beacon state, please provide a genesis state file or use checkpoint sync"); } async function readWSState( diff --git a/packages/cli/src/cmds/beacon/options.ts b/packages/cli/src/cmds/beacon/options.ts index 1c6318961f..82660e4eab 100644 --- a/packages/cli/src/cmds/beacon/options.ts +++ b/packages/cli/src/cmds/beacon/options.ts @@ -150,7 +150,7 @@ export const beaconExtraOptions: CliCommandOptions = { private: { description: - "Do not send implementation details over p2p identify protocol and in builder, execution engine and eth1 requests", + "Do not send implementation details over p2p identify protocol, and in builder and execution engine requests", type: "boolean", }, diff --git a/packages/cli/src/cmds/dev/files.ts b/packages/cli/src/cmds/dev/files.ts index c8536b8e5c..11367eeee5 100644 --- a/packages/cli/src/cmds/dev/files.ts +++ b/packages/cli/src/cmds/dev/files.ts @@ -15,7 +15,7 @@ export async function writeTestnetFiles( const genesisTime = Math.floor(Date.now() / 1000); const eth1BlockHash = Buffer.alloc(32, 0); - const {state} = nodeUtils.initDevState(config, genesisValidators, {genesisTime, eth1BlockHash}); + const state = nodeUtils.initDevState(config, genesisValidators, {genesisTime, eth1BlockHash}); // Write testnet data fs.mkdirSync(targetDir, {recursive: true}); diff --git a/packages/cli/src/cmds/dev/handler.ts b/packages/cli/src/cmds/dev/handler.ts index 9bccd695bd..2b591f2a9c 100644 --- a/packages/cli/src/cmds/dev/handler.ts +++ b/packages/cli/src/cmds/dev/handler.ts @@ -60,7 +60,7 @@ export async function devHandler(args: IDevArgs & GlobalArgs): Promise { const genesisTime = args.genesisTime ?? 
Math.floor(Date.now() / 1000) + 5; const eth1BlockHash = fromHex(args.genesisEth1Hash ?? toHex(Buffer.alloc(32, 0x0b))); - const {state} = nodeUtils.initDevState(config, validatorCount, {genesisTime, eth1BlockHash}); + const state = nodeUtils.initDevState(config, validatorCount, {genesisTime, eth1BlockHash}); args.genesisStateFile = "genesis.ssz"; fs.writeFileSync(args.genesisStateFile, state.serialize()); diff --git a/packages/cli/src/cmds/dev/options.ts b/packages/cli/src/cmds/dev/options.ts index 3bbc32f0ab..b6065aee44 100644 --- a/packages/cli/src/cmds/dev/options.ts +++ b/packages/cli/src/cmds/dev/options.ts @@ -80,11 +80,6 @@ const externalOptionsOverrides: Partial): IBeaconNodeOptions["eth1"] { - let jwtSecretHex: string | undefined; - let jwtId: string | undefined; - - let providerUrls = args["eth1.providerUrls"]; - - // If no providerUrls are explicitly provided, we should pick the execution endpoint - // because as per Kiln spec v2.1, execution *must* host the `eth_` methods necessary - // for deposit and merge trackers on engine endpoints as well protected by a - // jwt auth mechanism. - if (providerUrls === undefined && args["execution.urls"]) { - providerUrls = args["execution.urls"]; - jwtSecretHex = args.jwtSecret ? extractJwtHexSecret(fs.readFileSync(args.jwtSecret, "utf-8").trim()) : undefined; - jwtId = args.jwtId; - } - - return { - enabled: args.eth1, - providerUrls, - jwtSecretHex, - jwtId, - depositContractDeployBlock: args["eth1.depositContractDeployBlock"], - disableEth1DepositDataTracker: args["eth1.disableEth1DepositDataTracker"], - unsafeAllowDepositDataOverwrite: args["eth1.unsafeAllowDepositDataOverwrite"], - forcedEth1DataVote: args["eth1.forcedEth1DataVote"], - }; -} - +/** + * @deprecated These options are no longer used since eth1 deposit tracking was removed. + */ export const options: CliCommandOptions = { eth1: { + hidden: true, + deprecated: true, description: "Whether to follow the eth1 chain", type: "boolean", - defaultDescription: String(defaultOptions.eth1.enabled), group: "eth1", }, "eth1.providerUrls": { + hidden: true, + deprecated: true, description: "Urls to Eth1 node with enabled rpc. If not explicitly provided and execution endpoint provided via execution.urls, it will use execution.urls. Otherwise will try connecting on the specified default(s)", - defaultDescription: defaultOptions.eth1.providerUrls?.join(","), type: "array", string: true, coerce: (urls: string[]): string[] => @@ -63,31 +45,32 @@ export const options: CliCommandOptions = { "eth1.depositContractDeployBlock": { hidden: true, + deprecated: true, description: "Block number at which the deposit contract contract was deployed", type: "number", - defaultDescription: String(defaultOptions.eth1.depositContractDeployBlock), group: "eth1", }, "eth1.disableEth1DepositDataTracker": { hidden: true, + deprecated: true, description: "Disable Eth1DepositDataTracker modules", type: "boolean", - defaultDescription: String(defaultOptions.eth1.disableEth1DepositDataTracker), group: "eth1", }, "eth1.unsafeAllowDepositDataOverwrite": { hidden: true, + deprecated: true, description: "Allow the deposit tracker to overwrite previously fetched and saved deposit event data. Warning!!! 
This is an unsafe operation, so enable this flag only if you know what you are doing.", type: "boolean", - defaultDescription: String(defaultOptions.eth1.unsafeAllowDepositDataOverwrite), group: "eth1", }, "eth1.forcedEth1DataVote": { hidden: true, + deprecated: true, description: "Vote for a specific eth1_data regardless of all conditions. Hex encoded ssz serialized Eth1Data type", type: "string", group: "eth1", diff --git a/packages/cli/src/options/beaconNodeOptions/index.ts b/packages/cli/src/options/beaconNodeOptions/index.ts index 23154df82a..686596e7e7 100644 --- a/packages/cli/src/options/beaconNodeOptions/index.ts +++ b/packages/cli/src/options/beaconNodeOptions/index.ts @@ -27,7 +27,6 @@ export function parseBeaconNodeArgs(args: BeaconNodeArgs): RecursivePartial { expect(privateKey.equals(prevPk)).toBe(true); }); - it("Set known deposit contract", async () => { - const {options} = await runBeaconHandlerInit({ - network: "mainnet", - }); - - // Okay to hardcode, since this value will never change - expect(options.eth1.depositContractDeployBlock).toBe(11052984); - }); - it("Apply custom network name from config file", async () => { const networkName = "test-network"; const config = {...chainConfig}; diff --git a/packages/cli/test/unit/config/beaconNodeOptions.test.ts b/packages/cli/test/unit/config/beaconNodeOptions.test.ts index 06d7faf8d0..eaa0736460 100644 --- a/packages/cli/test/unit/config/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/config/beaconNodeOptions.test.ts @@ -12,8 +12,8 @@ describe("config / beaconNodeOptions", () => { }); it("Should return added partial options", () => { - const initialPartialOptions = {eth1: {enabled: true}}; - const editedPartialOptions = {eth1: {enabled: false}}; + const initialPartialOptions = {metrics: {enabled: true}}; + const editedPartialOptions = {metrics: {enabled: false}}; const beaconNodeOptions = new BeaconNodeOptions(initialPartialOptions); beaconNodeOptions.set(editedPartialOptions); diff --git a/packages/cli/test/unit/options/beaconNodeOptions.test.ts b/packages/cli/test/unit/options/beaconNodeOptions.test.ts index c32a35132f..9843bffda9 100644 --- a/packages/cli/test/unit/options/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/options/beaconNodeOptions.test.ts @@ -1,9 +1,7 @@ -import fs from "node:fs"; import {describe, expect, it} from "vitest"; import {ArchiveMode, IBeaconNodeOptions} from "@lodestar/beacon-node"; import {RecursivePartial} from "@lodestar/utils"; import {BeaconNodeArgs, parseBeaconNodeArgs} from "../../../src/options/beaconNodeOptions/index.js"; -import {getTestdirPath} from "../../utils.js"; describe("options / beaconNodeOptions", () => { it("Should parse BeaconNodeArgs", () => { @@ -45,15 +43,6 @@ describe("options / beaconNodeOptions", () => { "chain.archiveMode": ArchiveMode.Frequency, emitPayloadAttributes: false, - eth1: true, - "eth1.providerUrl": "http://my.node:8545", - "eth1.providerUrls": ["http://my.node:8545"], - "eth1.depositContractDeployBlock": 1625314, - "eth1.disableEth1DepositDataTracker": true, - "eth1.unsafeAllowDepositDataOverwrite": false, - "eth1.forcedEth1DataVote": - "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa0000000000000000bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "execution.urls": ["http://localhost:8551"], "execution.timeout": 12000, "execution.retryDelay": 2000, @@ -153,15 +142,6 @@ describe("options / beaconNodeOptions", () => { maxCPStateEpochsInMemory: 100, maxCPStateEpochsOnDisk: 1000, }, - eth1: { - enabled: 
true, - providerUrls: ["http://my.node:8545"], - depositContractDeployBlock: 1625314, - disableEth1DepositDataTracker: true, - unsafeAllowDepositDataOverwrite: false, - forcedEth1DataVote: - "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa0000000000000000bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - }, executionEngine: { urls: ["http://localhost:8551"], retries: 1, @@ -230,28 +210,4 @@ describe("options / beaconNodeOptions", () => { const options = parseBeaconNodeArgs(beaconNodeArgsPartial); expect(options).toEqual(expectedOptions); }); - - it("Should use execution endpoint & jwt for eth1", () => { - const jwtSecretFile = getTestdirPath("./jwtsecret"); - const jwtSecretHex = "0xdc6457099f127cf0bac78de8b297df04951281909db4f58b43def7c7151e765d"; - fs.writeFileSync(jwtSecretFile, jwtSecretHex, {encoding: "utf8"}); - - // Cast to match the expected fully defined type - const beaconNodeArgsPartial = { - eth1: true, - "execution.urls": ["http://my.node:8551"], - jwtSecret: jwtSecretFile, - } as BeaconNodeArgs; - - const expectedOptions: RecursivePartial = { - eth1: { - enabled: true, - providerUrls: ["http://my.node:8551"], - jwtSecretHex, - }, - }; - - const options = parseBeaconNodeArgs(beaconNodeArgsPartial); - expect(options.eth1).toEqual(expectedOptions.eth1); - }); }); diff --git a/packages/cli/test/utils/crucible/clients/beacon/lodestar.ts b/packages/cli/test/utils/crucible/clients/beacon/lodestar.ts index beb5c513f2..86d5a6f96a 100644 --- a/packages/cli/test/utils/crucible/clients/beacon/lodestar.ts +++ b/packages/cli/test/utils/crucible/clients/beacon/lodestar.ts @@ -57,11 +57,9 @@ export const generateLodestarBeaconNode: BeaconNodeGenerator Number Map */ pubkey2index: PubkeyIndexMap; diff --git a/packages/validator/src/services/validatorStore.ts b/packages/validator/src/services/validatorStore.ts index c20bc54276..6f24ef6738 100644 --- a/packages/validator/src/services/validatorStore.ts +++ b/packages/validator/src/services/validatorStore.ts @@ -53,7 +53,7 @@ import {DoppelgangerService} from "./doppelgangerService.js"; import {IndicesService} from "./indices.js"; type BLSPubkeyMaybeHex = BLSPubkey | PubkeyHex; -type Eth1Address = string; +type ExecutionAddress = string; export enum SignerType { Local, @@ -74,7 +74,7 @@ export type SignerRemote = { type DefaultProposerConfig = { graffiti?: string; strictFeeRecipientCheck: boolean; - feeRecipient: Eth1Address; + feeRecipient: ExecutionAddress; builder: { gasLimit: number; selection: routes.validator.BuilderSelection; @@ -85,7 +85,7 @@ type DefaultProposerConfig = { export type ProposerConfig = { graffiti?: string; strictFeeRecipientCheck?: boolean; - feeRecipient?: Eth1Address; + feeRecipient?: ExecutionAddress; builder?: { gasLimit?: number; selection?: routes.validator.BuilderSelection; @@ -219,7 +219,7 @@ export class ValidatorStore { : this.indicesService.pollValidatorIndices(Array.from(this.validators.keys())); } - getFeeRecipient(pubkeyHex: PubkeyHex): Eth1Address { + getFeeRecipient(pubkeyHex: PubkeyHex): ExecutionAddress { const validatorData = this.validators.get(pubkeyHex); if (validatorData === undefined) { throw Error(`Validator pubkey ${pubkeyHex} not known`); @@ -227,12 +227,12 @@ export class ValidatorStore { return validatorData.feeRecipient ?? 
this.defaultProposerConfig.feeRecipient; } - getFeeRecipientByIndex(index: ValidatorIndex): Eth1Address { + getFeeRecipientByIndex(index: ValidatorIndex): ExecutionAddress { const pubkey = this.indicesService.index2pubkey.get(index); return pubkey ? this.getFeeRecipient(pubkey) : this.defaultProposerConfig.feeRecipient; } - setFeeRecipient(pubkeyHex: PubkeyHex, feeRecipient: Eth1Address): void { + setFeeRecipient(pubkeyHex: PubkeyHex, feeRecipient: ExecutionAddress): void { const validatorData = this.validators.get(pubkeyHex); if (validatorData === undefined) { throw Error(`Validator pubkey ${pubkeyHex} not known`); @@ -696,7 +696,7 @@ export class ValidatorStore { async signValidatorRegistration( pubkeyMaybeHex: BLSPubkeyMaybeHex, - regAttributes: {feeRecipient: Eth1Address; gasLimit: number}, + regAttributes: {feeRecipient: ExecutionAddress; gasLimit: number}, _slot: Slot ): Promise { const pubkey = typeof pubkeyMaybeHex === "string" ? fromHex(pubkeyMaybeHex) : pubkeyMaybeHex; @@ -727,7 +727,7 @@ export class ValidatorStore { async getValidatorRegistration( pubkeyMaybeHex: BLSPubkeyMaybeHex, - regAttributes: {feeRecipient: Eth1Address; gasLimit: number}, + regAttributes: {feeRecipient: ExecutionAddress; gasLimit: number}, slot: Slot ): Promise { const pubkeyHex = typeof pubkeyMaybeHex === "string" ? pubkeyMaybeHex : toPubkeyHex(pubkeyMaybeHex); diff --git a/scripts/dev/node1.sh b/scripts/dev/node1.sh index 742eb45394..f269839108 100755 --- a/scripts/dev/node1.sh +++ b/scripts/dev/node1.sh @@ -13,6 +13,5 @@ GENESIS_TIME=$(date +%s) --rest.namespace '*' \ --metrics \ --logLevel debug \ - --eth1 false \ --network.rateLimitMultiplier 0 \ $@ diff --git a/scripts/dev/node2.sh b/scripts/dev/node2.sh index c95cfd5ef4..a3aed49f52 100755 --- a/scripts/dev/node2.sh +++ b/scripts/dev/node2.sh @@ -15,7 +15,6 @@ GENESIS_TIME=$(curl -s http://localhost:9596/eth/v1/beacon/genesis | jq -r .data --metrics \ --metrics.port 8009 \ --logLevel debug \ - --eth1 false \ --port 9001 \ --rest.port 9597 \ --network.connectToDiscv5Bootnodes true \ From 2fe8de2346a1d47a0618e6881b8fb56d3b113a15 Mon Sep 17 00:00:00 2001 From: Nico Flaig Date: Wed, 17 Dec 2025 12:26:44 +0100 Subject: [PATCH 08/20] chore: add lerna exec to fix build watch/ifchanged commands (#8704) `yarn build:watch` and `yarn build:ifchanged` no longer work since https://github.com/ChainSafe/lodestar/pull/8675 since `lerna exec` requires to install a separate package `@lerna-lite/exec` to work properly --- package.json | 1 + yarn.lock | 95 ++++++++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 94 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 714c7a3666..e2368a9975 100644 --- a/package.json +++ b/package.json @@ -46,6 +46,7 @@ "@chainsafe/benchmark": "^1.2.3", "@chainsafe/biomejs-config": "^1.0.0", "@lerna-lite/cli": "^4.9.4", + "@lerna-lite/exec": "^4.9.4", "@lerna-lite/publish": "^4.9.4", "@lerna-lite/run": "^4.9.4", "@lerna-lite/version": "^4.9.4", diff --git a/yarn.lock b/yarn.lock index 640fffc476..87f9514527 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1905,6 +1905,20 @@ resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== +"@lerna-lite/cli@4.10.2": + version "4.10.2" + resolved "https://registry.yarnpkg.com/@lerna-lite/cli/-/cli-4.10.2.tgz#43fac58feafd9cc663c6f78d4d8692b3cf8e6a1a" + integrity 
sha512-yT5/z/FvVKWO9NDF7VTN5bYkuF96yH5VPl1I6n37BOOj/KFtPSG2SEMclbkJ+CCYw/kmV7sxHeodIyWnpJ8Ggw== + dependencies: + "@lerna-lite/core" "4.10.2" + "@lerna-lite/init" "4.10.2" + "@lerna-lite/npmlog" "4.10.0" + dedent "^1.7.0" + dotenv "^17.2.3" + import-local "^3.2.0" + load-json-file "^7.0.1" + yargs "^18.0.0" + "@lerna-lite/cli@4.9.4", "@lerna-lite/cli@^4.9.4": version "4.9.4" resolved "https://registry.yarnpkg.com/@lerna-lite/cli/-/cli-4.9.4.tgz#e378f4ce0e29cad08e74d5e9a05923e5b183ebbb" @@ -1919,6 +1933,38 @@ load-json-file "^7.0.1" yargs "^18.0.0" +"@lerna-lite/core@4.10.2": + version "4.10.2" + resolved "https://registry.yarnpkg.com/@lerna-lite/core/-/core-4.10.2.tgz#784a0454dd33256589b854d75b5db95e3a62c5d9" + integrity sha512-LTGO6tWIBHi5clHECz5tqhtWCUedlyX4n93SbIP1n4ehPS+qSuHh0fJ3QvQQX18jvktKFwaR7AKzrL21lxtGYQ== + dependencies: + "@inquirer/expand" "^4.0.23" + "@inquirer/input" "^4.3.1" + "@inquirer/select" "^4.4.2" + "@lerna-lite/npmlog" "4.10.0" + "@npmcli/run-script" "^10.0.3" + ci-info "^4.3.1" + config-chain "^1.1.13" + dedent "^1.7.0" + execa "^9.6.1" + fs-extra "^11.3.2" + glob-parent "^6.0.2" + json5 "^2.2.3" + lilconfig "^3.1.3" + load-json-file "^7.0.1" + npm-package-arg "^13.0.2" + p-map "^7.0.4" + p-queue "^9.0.1" + semver "^7.7.3" + slash "^5.1.0" + tinyglobby "^0.2.15" + tinyrainbow "^3.0.3" + write-file-atomic "^7.0.0" + write-json-file "^7.0.0" + write-package "^7.2.0" + yaml "^2.8.2" + zeptomatch "^2.1.0" + "@lerna-lite/core@4.9.4": version "4.9.4" resolved "https://registry.yarnpkg.com/@lerna-lite/core/-/core-4.9.4.tgz#12940802a0911307bc842258c8605b639962d7d3" @@ -1951,6 +1997,28 @@ yaml "^2.8.1" zeptomatch "^2.1.0" +"@lerna-lite/exec@^4.9.4": + version "4.10.2" + resolved "https://registry.yarnpkg.com/@lerna-lite/exec/-/exec-4.10.2.tgz#dfd0e5ba07c38b67bfcdf9324ea16e06fb50d4a8" + integrity sha512-18wY/LcFpvWt7eIW3SyEJ6aaSUrhj10eZV6EAOtBan+b4x+U05WEqHgSXdOGDaiEinkrO5tToR/cvY7j9+gM2g== + dependencies: + "@lerna-lite/cli" "4.10.2" + "@lerna-lite/core" "4.10.2" + "@lerna-lite/profiler" "4.10.2" + dotenv "^17.2.3" + p-map "^7.0.4" + tinyrainbow "^3.0.3" + +"@lerna-lite/init@4.10.2": + version "4.10.2" + resolved "https://registry.yarnpkg.com/@lerna-lite/init/-/init-4.10.2.tgz#8fc5288f0dd8753c6821e005b963f84a924131f8" + integrity sha512-TNnSQ7ewaY/jSvJGQSTRQT+vHt/wa3LKEEM7jkH4VSG4wCbzIV2u3xCOZ1n+oVaBaH2FO54qUKaqU8qruxVy4g== + dependencies: + "@lerna-lite/core" "4.10.2" + fs-extra "^11.3.2" + p-map "^7.0.4" + write-json-file "^7.0.0" + "@lerna-lite/init@4.9.4": version "4.9.4" resolved "https://registry.yarnpkg.com/@lerna-lite/init/-/init-4.9.4.tgz#59ec9cdc5966cc3860590884bfb1738f5ce6b38e" @@ -1961,6 +2029,19 @@ p-map "^7.0.4" write-json-file "^7.0.0" +"@lerna-lite/npmlog@4.10.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@lerna-lite/npmlog/-/npmlog-4.10.0.tgz#add4715f0f91ee0494aaaf491a11bbb1480a3629" + integrity sha512-vwI9qbhbbEjZJW/xXcOypqbIp3QXjsFD0kxGeHpGWXheeMtQSkRicJHH6v2dwVFid10EQmET47ItlCRAMhp12g== + dependencies: + aproba "^2.1.0" + fast-string-width "^3.0.2" + has-unicode "^2.0.1" + set-blocking "^2.0.0" + signal-exit "^4.1.0" + tinyrainbow "^3.0.3" + wide-align "^1.1.5" + "@lerna-lite/npmlog@4.9.4": version "4.9.4" resolved "https://registry.yarnpkg.com/@lerna-lite/npmlog/-/npmlog-4.9.4.tgz#2afdaaaad2347ce9fe0299261387fe762ed68111" @@ -1974,6 +2055,16 @@ tinyrainbow "^3.0.3" wide-align "^1.1.5" +"@lerna-lite/profiler@4.10.2": + version "4.10.2" + resolved 
"https://registry.yarnpkg.com/@lerna-lite/profiler/-/profiler-4.10.2.tgz#ea20b51cc32088c0f4b23b49fdccf92ed6d5206d" + integrity sha512-JAjHF74g52AOJFxxOcF3lA2G1BcEjU6nZ3E/F7tJ+P5ct4Hc6VaMsP61OHYLUjNWzIq3L8y3csVHX1i4MyzNIA== + dependencies: + "@lerna-lite/core" "4.10.2" + "@lerna-lite/npmlog" "4.10.0" + fs-extra "^11.3.2" + upath "^2.0.1" + "@lerna-lite/profiler@4.9.4": version "4.9.4" resolved "https://registry.yarnpkg.com/@lerna-lite/profiler/-/profiler-4.9.4.tgz#52c230d5d018b84b6a3c7d22222974abbdf947c0" @@ -5949,7 +6040,7 @@ evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: md5.js "^1.3.4" safe-buffer "^5.1.1" -execa@^9.6.0: +execa@^9.6.0, execa@^9.6.1: version "9.6.1" resolved "https://registry.yarnpkg.com/execa/-/execa-9.6.1.tgz#5b90acedc6bdc0fa9b9a6ddf8f9cbb0c75a7c471" integrity sha512-9Be3ZoN4LmYR90tUoVu2te2BsbzHfhJyfEiAVfz7N5/zv+jduIfLrV2xdQXOHbaD6KgpGdO9PRPM1Y4Q9QkPkA== @@ -12019,7 +12110,7 @@ yaml@^2.7.0: resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.7.0.tgz#aef9bb617a64c937a9a748803786ad8d3ffe1e98" integrity sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA== -yaml@^2.8.1: +yaml@^2.8.1, yaml@^2.8.2: version "2.8.2" resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.8.2.tgz#5694f25eca0ce9c3e7a9d9e00ce0ddabbd9e35c5" integrity sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A== From f4236afdba9d040d4ba3f893c72295c8a159d2ac Mon Sep 17 00:00:00 2001 From: Nico Flaig Date: Wed, 17 Dec 2025 12:32:41 +0100 Subject: [PATCH 09/20] chore: delete unused eth1 data from existing databases (#8696) Suggestion from https://github.com/ChainSafe/lodestar/pull/8692#pullrequestreview-3580953573 --- packages/beacon-node/src/db/beacon.ts | 39 +++++++++++++++++++++++- packages/beacon-node/src/db/buckets.ts | 18 +++++++---- packages/beacon-node/src/db/interface.ts | 2 ++ packages/beacon-node/src/node/nodejs.ts | 11 +++++++ 4 files changed, 63 insertions(+), 7 deletions(-) diff --git a/packages/beacon-node/src/db/beacon.ts b/packages/beacon-node/src/db/beacon.ts index 944e5fc29e..4a67931532 100644 --- a/packages/beacon-node/src/db/beacon.ts +++ b/packages/beacon-node/src/db/beacon.ts @@ -1,5 +1,6 @@ import {ChainForkConfig} from "@lodestar/config"; -import {Db, LevelDbControllerMetrics} from "@lodestar/db"; +import {Db, LevelDbControllerMetrics, encodeKey} from "@lodestar/db"; +import {Bucket} from "./buckets.js"; import {IBeaconDb} from "./interface.js"; import {CheckpointStateRepository} from "./repositories/checkpointState.js"; import { @@ -95,4 +96,40 @@ export class BeaconDb implements IBeaconDb { // TODO: Enable once it's deemed safe // await this.block.batchDelete(await this.block.keys()); } + + async deleteDeprecatedEth1Data(): Promise { + const deprecatedBuckets = [ + Bucket.phase0_eth1Data, + Bucket.index_depositDataRoot, + Bucket.phase0_depositData, + Bucket.phase0_depositEvent, + Bucket.phase0_preGenesisState, + Bucket.phase0_preGenesisStateLastProcessedBlock, + ]; + + for (const bucket of deprecatedBuckets) { + await this.deleteBucketData(bucket); + } + } + + private async deleteBucketData(bucket: Bucket): Promise { + const minKey = encodeKey(bucket, Buffer.alloc(0)); + const maxKey = encodeKey(bucket + 1, Buffer.alloc(0)); + + // Batch delete to avoid loading all keys into memory at once + const BATCH_DELETE_SIZE = 1000; + let keysBatch: Uint8Array[] = []; + + for await (const key of this.db.keysStream({gte: minKey, lt: maxKey})) { + keysBatch.push(key); + if (keysBatch.length >= BATCH_DELETE_SIZE) { 
+ await this.db.batchDelete(keysBatch); + keysBatch = []; + } + } + + if (keysBatch.length > 0) { + await this.db.batchDelete(keysBatch); + } + } } diff --git a/packages/beacon-node/src/db/buckets.ts b/packages/beacon-node/src/db/buckets.ts index 1d16244025..5252199f43 100644 --- a/packages/beacon-node/src/db/buckets.ts +++ b/packages/beacon-node/src/db/buckets.ts @@ -16,13 +16,16 @@ export enum Bucket { index_mainChain = 6, // Slot -> Root // justified, finalized state and block hashes index_chainInfo = 7, // Key -> Number64 | stateHash | blockHash - // phase0_eth1Data = 8, // DEPRECATED - eth1 deposit tracking is not required since electra - // index_depositDataRoot = 9, // DEPRECATED - eth1 deposit tracking is not required since electra + /** @deprecated Eth1 deposit tracking is not required since electra, only kept around to delete data from existing databases */ + phase0_eth1Data = 8, + /** @deprecated Eth1 deposit tracking is not required since electra, only kept around to delete data from existing databases */ + index_depositDataRoot = 9, // op pool // phase0_attestation = 10, // DEPRECATED on v0.25.0 // phase0_aggregateAndProof = 11, // Root -> AggregateAndProof, DEPRECATED on v.27.0 - // phase0_depositData = 12, // DEPRECATED - eth1 deposit tracking is not required since electra + /** @deprecated Eth1 deposit tracking is not required since electra, only kept around to delete data from existing databases */ + phase0_depositData = 12, phase0_exit = 13, // ValidatorIndex -> VoluntaryExit phase0_proposerSlashing = 14, // ValidatorIndex -> ProposerSlashing allForks_attesterSlashing = 15, // Root -> AttesterSlashing @@ -31,15 +34,18 @@ export enum Bucket { allForks_checkpointState = 17, // Root -> BeaconState // allForks_pendingBlock = 25, // Root -> SignedBeaconBlock // DEPRECATED on v0.30.0 - // phase0_depositEvent = 19, // DEPRECATED - eth1 deposit tracking is not required since electra + /** @deprecated Eth1 deposit tracking is not required since electra, only kept around to delete data from existing databases */ + phase0_depositEvent = 19, index_stateArchiveRootIndex = 26, // State Root -> slot deneb_blobSidecars = 27, // DENEB BeaconBlockRoot -> BlobSidecars deneb_blobSidecarsArchive = 28, // DENEB BeaconBlockSlot -> BlobSidecars - // phase0_preGenesisState = 30, // DEPRECATED - genesis from eth1 is no longer supported - // phase0_preGenesisStateLastProcessedBlock = 31, // DEPRECATED - genesis from eth1 is no longer supported + /** @deprecated Genesis from eth1 is no longer supported, only kept around to delete data from existing databases */ + phase0_preGenesisState = 30, + /** @deprecated Genesis from eth1 is no longer supported, only kept around to delete data from existing databases */ + phase0_preGenesisStateLastProcessedBlock = 31, // Lightclient server // altair_bestUpdatePerCommitteePeriod = 30, // DEPRECATED on v0.32.0 diff --git a/packages/beacon-node/src/db/interface.ts b/packages/beacon-node/src/db/interface.ts index 1f186ae76a..c87d106be3 100644 --- a/packages/beacon-node/src/db/interface.ts +++ b/packages/beacon-node/src/db/interface.ts @@ -56,6 +56,8 @@ export interface IBeaconDb { pruneHotDb(): Promise; + deleteDeprecatedEth1Data(): Promise; + /** Close the connection to the db instance and close the db store. 
*/ close(): Promise; /** To inject metrics after CLI initialization */ diff --git a/packages/beacon-node/src/node/nodejs.ts b/packages/beacon-node/src/node/nodejs.ts index dff23b8743..5957c81695 100644 --- a/packages/beacon-node/src/node/nodejs.ts +++ b/packages/beacon-node/src/node/nodejs.ts @@ -197,6 +197,17 @@ export class BeaconNode { // TODO: Should this call be awaited? await db.pruneHotDb(); + // Delete deprecated eth1 data to free up disk space for users + logger.debug("Deleting deprecated eth1 data from database"); + const startTime = Date.now(); + db.deleteDeprecatedEth1Data() + .then(() => { + logger.debug("Deleted deprecated eth1 data", {durationMs: Date.now() - startTime}); + }) + .catch((e) => { + logger.error("Failed to delete deprecated eth1 data", {}, e); + }); + const monitoring = opts.monitoring.endpoint ? new MonitoringService( "beacon", From c151a164f2c2fbe5e55d93b7bfc0594deab7038a Mon Sep 17 00:00:00 2001 From: twoeths <10568965+twoeths@users.noreply.github.com> Date: Thu, 18 Dec 2025 09:25:53 +0700 Subject: [PATCH 10/20] chore: use config from beacon chain (#8703) **Motivation** - as a preparation for lodestar-z integration, we should not access config from any cached BeaconState **Description** - use chain.config instead part of #8652 --------- Co-authored-by: Tuyen Nguyen --- .../src/chain/blocks/verifyBlock.ts | 1 + .../chain/blocks/verifyBlocksSignatures.ts | 4 ++- packages/beacon-node/src/chain/chain.ts | 10 +++++--- .../opPools/aggregatedAttestationPool.ts | 14 +++++------ .../beacon-node/src/chain/opPools/opPool.ts | 16 ++++++------ .../chain/produceBlock/produceBlockBody.ts | 4 +-- .../src/chain/rewards/attestationsRewards.ts | 17 ++++++++++--- .../src/chain/rewards/blockRewards.ts | 9 ++++--- .../src/chain/rewards/syncCommitteeRewards.ts | 4 ++- .../stateCache/persistentCheckpointsCache.ts | 17 +++++++++++-- .../src/chain/validation/attesterSlashing.ts | 2 +- .../beacon-node/src/chain/validation/block.ts | 2 +- .../chain/validation/blsToExecutionChange.ts | 2 +- .../src/chain/validation/proposerSlashing.ts | 2 +- .../signatureSets/aggregateAndProof.ts | 2 +- .../signatureSets/contributionAndProof.ts | 4 ++- .../validation/signatureSets/syncCommittee.ts | 4 ++- .../syncCommitteeContribution.ts | 4 ++- .../syncCommitteeSelectionProof.ts | 3 ++- .../src/chain/validation/syncCommittee.ts | 2 +- .../syncCommitteeContributionAndProof.ts | 11 +++++--- .../src/chain/validation/voluntaryExit.ts | 2 +- .../beacon-node/src/sync/backfill/backfill.ts | 11 +++++--- .../beacon-node/src/sync/backfill/verify.ts | 4 ++- .../test/mocks/mockedBeaconChain.ts | 6 ++--- .../opPools/aggregatedAttestationPool.test.ts | 4 +-- .../test/perf/chain/opPools/opPool.test.ts | 7 ++++-- .../opPools/aggregatedAttestationPool.test.ts | 25 +++++++++---------- .../persistentCheckpointsCache.test.ts | 9 +++++++ .../api/impl/validator/produceBlockV3.test.ts | 2 +- .../test/unit/chain/regen/regen.test.ts | 4 +++ .../unit/chain/rewards/blockRewards.test.ts | 10 +++++++- .../chain/validation/attesterSlashing.test.ts | 5 +++- .../test/unit/chain/validation/block.test.ts | 9 ++++--- .../validation/blsToExecutionChange.test.ts | 2 +- .../chain/validation/proposerSlashing.test.ts | 5 +++- .../chain/validation/syncCommittee.test.ts | 9 ++++--- .../chain/validation/voluntaryExit.test.ts | 14 ++++++----- .../src/block/isValidIndexedAttestation.ts | 7 ++++-- .../src/block/processAttestationPhase0.ts | 1 + .../src/block/processAttestationsAltair.ts | 8 +++++- .../src/block/processAttesterSlashing.ts | 2 +- 
.../src/block/processProposerSlashing.ts | 7 +++++- .../src/block/processRandao.ts | 2 +- .../src/block/processSyncCommittee.ts | 12 +++++++-- .../src/block/processVoluntaryExit.ts | 5 +++- .../state-transition/src/cache/stateCache.ts | 1 + .../src/signatureSets/attesterSlashings.ts | 10 +++++--- .../src/signatureSets/index.ts | 15 ++++++----- .../src/signatureSets/indexedAttestation.ts | 12 ++++++--- .../src/signatureSets/proposer.ts | 6 +++-- .../src/signatureSets/proposerSlashings.ts | 11 ++++---- .../src/signatureSets/randao.ts | 7 ++++-- .../src/signatureSets/voluntaryExits.ts | 10 +++++--- .../state-transition/src/stateTransition.ts | 5 +++- .../block/isValidIndexedAttestation.test.ts | 2 +- .../unit/signatureSets/signatureSets.test.ts | 8 +++++- 57 files changed, 268 insertions(+), 125 deletions(-) diff --git a/packages/beacon-node/src/chain/blocks/verifyBlock.ts b/packages/beacon-node/src/chain/blocks/verifyBlock.ts index e74ce2b759..9b4e7900fb 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlock.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlock.ts @@ -139,6 +139,7 @@ export async function verifyBlocksInEpoch( // All signatures at once opts.skipVerifyBlockSignatures !== true ? verifyBlocksSignatures( + this.config, this.index2pubkey, this.bls, this.logger, diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts index ac13688a75..af63e01f84 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {CachedBeaconStateAllForks, Index2PubkeyCache, getBlockSignatureSets} from "@lodestar/state-transition"; import {IndexedAttestation, SignedBeaconBlock} from "@lodestar/types"; import {Logger} from "@lodestar/utils"; @@ -15,6 +16,7 @@ import {ImportBlockOpts} from "./types.js"; * Since all data is known in advance all signatures are verified at once in parallel. 
*/ export async function verifyBlocksSignatures( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, bls: IBlsVerifier, logger: Logger, @@ -39,7 +41,7 @@ export async function verifyBlocksSignatures( : // // Verify signatures per block to track which block is invalid bls.verifySignatureSets( - getBlockSignatureSets(index2pubkey, preState0, block, indexedAttestationsByBlock[i], { + getBlockSignatureSets(config, index2pubkey, preState0, block, indexedAttestationsByBlock[i], { skipProposerSignature: opts.validProposerSignature, }) ); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index e35e1f1406..ac6037b1f6 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -142,7 +142,7 @@ export class BeaconChain implements IBeaconChain { readonly aggregatedAttestationPool: AggregatedAttestationPool; readonly syncCommitteeMessagePool: SyncCommitteeMessagePool; readonly syncContributionAndProofPool; - readonly opPool = new OpPool(); + readonly opPool: OpPool; // Gossip seen cache readonly seenAttesters = new SeenAttesters(); @@ -260,6 +260,7 @@ export class BeaconChain implements IBeaconChain { this.aggregatedAttestationPool = new AggregatedAttestationPool(this.config, metrics); this.syncCommitteeMessagePool = new SyncCommitteeMessagePool(config, clock, this.opts?.preaggregateSlotDistance); this.syncContributionAndProofPool = new SyncContributionAndProofPool(config, clock, metrics, logger); + this.opPool = new OpPool(config); this.seenAggregatedAttestations = new SeenAggregatedAttestations(metrics); this.seenContributionAndProof = new SeenContributionAndProof(metrics); @@ -334,6 +335,7 @@ export class BeaconChain implements IBeaconChain { this.cpStateDatastore = fileDataStore ? new FileCPStateDatastore(dataDir) : new DbCPStateDatastore(this.db); checkpointStateCache = new PersistentCheckpointStateCache( { + config, metrics, logger, clock, @@ -1306,7 +1308,7 @@ export class BeaconChain implements IBeaconChain { const postState = this.regen.getStateSync(toRootHex(block.stateRoot)) ?? 
undefined; - return computeBlockRewards(block, preState.clone(), postState?.clone()); + return computeBlockRewards(this.config, block, preState.clone(), postState?.clone()); } async getAttestationsRewards( @@ -1330,7 +1332,7 @@ export class BeaconChain implements IBeaconChain { throw Error(`State is not in cache for slot ${slot}`); } - const rewards = await computeAttestationsRewards(this.pubkey2index, cachedState, validatorIds); + const rewards = await computeAttestationsRewards(this.config, this.pubkey2index, cachedState, validatorIds); return {rewards, executionOptimistic, finalized}; } @@ -1347,6 +1349,6 @@ export class BeaconChain implements IBeaconChain { preState = processSlots(preState, block.slot); // Dial preState's slot to block.slot - return computeSyncCommitteeRewards(this.index2pubkey, block, preState.clone(), validatorIds); + return computeSyncCommitteeRewards(this.config, this.index2pubkey, block, preState.clone(), validatorIds); } } diff --git a/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts b/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts index 3e65aba3eb..038ef03135 100644 --- a/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts +++ b/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts @@ -1,6 +1,6 @@ import {Signature, aggregateSignatures} from "@chainsafe/blst"; import {BitArray} from "@chainsafe/ssz"; -import {ChainForkConfig} from "@lodestar/config"; +import {BeaconConfig} from "@lodestar/config"; import {IForkChoice} from "@lodestar/fork-choice"; import { ForkName, @@ -162,7 +162,7 @@ export class AggregatedAttestationPool { private lowestPermissibleSlot = 0; constructor( - private readonly config: ChainForkConfig, + private readonly config: BeaconConfig, private readonly metrics: Metrics | null = null ) { metrics?.opPool.aggregatedAttestationPool.attDataPerSlot.addCollect(() => this.onScrapeMetrics(metrics)); @@ -249,7 +249,7 @@ export class AggregatedAttestationPool { const stateEpoch = state.epochCtx.epoch; const statePrevEpoch = stateEpoch - 1; - const notSeenValidatorsFn = getNotSeenValidatorsFn(state); + const notSeenValidatorsFn = getNotSeenValidatorsFn(this.config, state); const validateAttestationDataFn = getValidateAttestationDataFn(forkChoice, state); const attestationsByScore: AttestationWithScore[] = []; @@ -362,7 +362,7 @@ export class AggregatedAttestationPool { const statePrevEpoch = stateEpoch - 1; const rootCache = new RootCache(state); - const notSeenValidatorsFn = getNotSeenValidatorsFn(state); + const notSeenValidatorsFn = getNotSeenValidatorsFn(this.config, state); const validateAttestationDataFn = getValidateAttestationDataFn(forkChoice, state); const slots = Array.from(this.attestationGroupByIndexByDataHexBySlot.keys()).sort((a, b) => b - a); @@ -656,7 +656,7 @@ export class MatchingDataAttestationGroup { private readonly attestations: AttestationWithIndex[] = []; constructor( - private readonly config: ChainForkConfig, + private readonly config: BeaconConfig, readonly committee: Uint32Array, readonly data: phase0.AttestationData ) {} @@ -864,9 +864,9 @@ export function aggregateConsolidation({byCommittee, attData}: AttestationsConso * Pre-compute participation from a CachedBeaconStateAllForks, for use to check if an attestation's committee * has already attested or not. 
*/ -export function getNotSeenValidatorsFn(state: CachedBeaconStateAllForks): GetNotSeenValidatorsFn { +export function getNotSeenValidatorsFn(config: BeaconConfig, state: CachedBeaconStateAllForks): GetNotSeenValidatorsFn { const stateSlot = state.slot; - if (state.config.getForkName(stateSlot) === ForkName.phase0) { + if (config.getForkName(stateSlot) === ForkName.phase0) { // Get attestations to be included in a phase0 block. // As we are close to altair, this is not really important, it's mainly for e2e. // The performance is not great due to the different BeaconState data structure to altair. diff --git a/packages/beacon-node/src/chain/opPools/opPool.ts b/packages/beacon-node/src/chain/opPools/opPool.ts index 7bec42bdb0..fb9900aa20 100644 --- a/packages/beacon-node/src/chain/opPools/opPool.ts +++ b/packages/beacon-node/src/chain/opPools/opPool.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {Id, Repository} from "@lodestar/db"; import { BLS_WITHDRAWAL_PREFIX, @@ -51,6 +52,8 @@ export class OpPool { /** Map of validator index -> SignedBLSToExecutionChange */ private readonly blsToExecutionChanges = new Map(); + constructor(private readonly config: BeaconConfig) {} + // Getters for metrics get attesterSlashingsSize(): number { @@ -191,9 +194,8 @@ export class OpPool { phase0.SignedVoluntaryExit[], capella.SignedBLSToExecutionChange[], ] { - const {config} = state; const stateEpoch = computeEpochAtSlot(state.slot); - const stateFork = config.getForkSeq(state.slot); + const stateFork = this.config.getForkSeq(state.slot); const toBeSlashedIndices = new Set(); const proposerSlashings: phase0.ProposerSlashing[] = []; @@ -265,7 +267,7 @@ export class OpPool { // a future fork. isVoluntaryExitSignatureIncludable( stateFork, - config.getForkSeq(computeStartSlotAtEpoch(voluntaryExit.message.epoch)) + this.config.getForkSeq(computeStartSlotAtEpoch(voluntaryExit.message.epoch)) ) ) { voluntaryExits.push(voluntaryExit); @@ -368,14 +370,13 @@ export class OpPool { * Prune if validator has already exited at or before the finalized checkpoint of the head. */ private pruneVoluntaryExits(headState: CachedBeaconStateAllForks): void { - const {config} = headState; - const headStateFork = config.getForkSeq(headState.slot); + const headStateFork = this.config.getForkSeq(headState.slot); const finalizedEpoch = headState.finalizedCheckpoint.epoch; for (const [key, voluntaryExit] of this.voluntaryExits.entries()) { // VoluntaryExit messages signed in the previous fork become invalid and can never be included in any future // block, so just drop as the head state advances into the next fork. - if (config.getForkSeq(computeStartSlotAtEpoch(voluntaryExit.message.epoch)) < headStateFork) { + if (this.config.getForkSeq(computeStartSlotAtEpoch(voluntaryExit.message.epoch)) < headStateFork) { this.voluntaryExits.delete(key); } @@ -392,9 +393,8 @@ export class OpPool { * to opPool once gossipsub seen cache TTL passes. */ private pruneBlsToExecutionChanges(headBlock: SignedBeaconBlock, headState: CachedBeaconStateAllForks): void { - const {config} = headState; const recentBlsToExecutionChanges = - config.getForkSeq(headBlock.message.slot) >= ForkSeq.capella + this.config.getForkSeq(headBlock.message.slot) >= ForkSeq.capella ? 
(headBlock as capella.SignedBeaconBlock).message.body.blsToExecutionChanges : []; diff --git a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts index f3850a7972..0dc25a7b4a 100644 --- a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts +++ b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts @@ -165,7 +165,7 @@ export async function produceBlockBody( // even though shouldOverrideBuilder is relevant for the engine response, for simplicity of typing // we just return it undefined for the builder which anyway doesn't get consumed downstream let shouldOverrideBuilder: boolean | undefined; - const fork = currentState.config.getForkName(blockSlot); + const fork = this.config.getForkName(blockSlot); const produceResult = { type: blockType, fork, @@ -644,7 +644,7 @@ export async function produceCommonBlockBody( ? this.metrics?.executionBlockProductionTimeSteps : this.metrics?.builderBlockProductionTimeSteps; - const fork = currentState.config.getForkName(slot); + const fork = this.config.getForkName(slot); // TODO: // Iterate through the naive aggregation pool and ensure all the attestations from there diff --git a/packages/beacon-node/src/chain/rewards/attestationsRewards.ts b/packages/beacon-node/src/chain/rewards/attestationsRewards.ts index 36fc07c15b..7e53b4d943 100644 --- a/packages/beacon-node/src/chain/rewards/attestationsRewards.ts +++ b/packages/beacon-node/src/chain/rewards/attestationsRewards.ts @@ -1,5 +1,6 @@ import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {routes} from "@lodestar/api"; +import {BeaconConfig} from "@lodestar/config"; import { EFFECTIVE_BALANCE_INCREMENT, ForkName, @@ -38,11 +39,12 @@ const defaultAttestationsReward = {head: 0, target: 0, source: 0, inclusionDelay const defaultAttestationsPenalty = {target: 0, source: 0}; export async function computeAttestationsRewards( + config: BeaconConfig, pubkey2index: PubkeyIndexMap, state: CachedBeaconStateAllForks, validatorIds?: (ValidatorIndex | string)[] ): Promise { - const fork = state.config.getForkName(state.slot); + const fork = config.getForkName(state.slot); if (fork === ForkName.phase0) { throw Error("Unsupported fork. Attestations rewards calculation is not available in phase0"); } @@ -50,8 +52,13 @@ export async function computeAttestationsRewards( const stateAltair = state as CachedBeaconStateAltair; const transitionCache = beforeProcessEpoch(stateAltair); - const [idealRewards, penalties] = computeIdealAttestationsRewardsAndPenaltiesAltair(stateAltair, transitionCache); + const [idealRewards, penalties] = computeIdealAttestationsRewardsAndPenaltiesAltair( + config, + stateAltair, + transitionCache + ); const totalRewards = computeTotalAttestationsRewardsAltair( + config, pubkey2index, stateAltair, transitionCache, @@ -64,12 +71,13 @@ export async function computeAttestationsRewards( } function computeIdealAttestationsRewardsAndPenaltiesAltair( + config: BeaconConfig, state: CachedBeaconStateAllForks, transitionCache: EpochTransitionCache ): [IdealAttestationsReward[], AttestationsPenalty[]] { const baseRewardPerIncrement = transitionCache.baseRewardPerIncrement; const activeBalanceByIncrement = transitionCache.totalActiveStakeByIncrement; - const fork = state.config.getForkName(state.slot); + const fork = config.getForkName(state.slot); const maxEffectiveBalance = isForkPostElectra(fork) ? 
MAX_EFFECTIVE_BALANCE_ELECTRA : MAX_EFFECTIVE_BALANCE; const maxEffectiveBalanceByIncrement = Math.floor(maxEffectiveBalance / EFFECTIVE_BALANCE_INCREMENT); @@ -139,6 +147,7 @@ function computeIdealAttestationsRewardsAndPenaltiesAltair( // Same calculation as `getRewardsAndPenaltiesAltair` but returns the breakdown of rewards instead of aggregated function computeTotalAttestationsRewardsAltair( + config: BeaconConfig, pubkey2index: PubkeyIndexMap, state: CachedBeaconStateAltair, transitionCache: EpochTransitionCache, @@ -148,7 +157,7 @@ function computeTotalAttestationsRewardsAltair( ): TotalAttestationsReward[] { const rewards = []; const {flags} = transitionCache; - const {epochCtx, config} = state; + const {epochCtx} = state; const validatorIndices = validatorIds .map((id) => (typeof id === "number" ? id : pubkey2index.get(fromHex(id)))) .filter((index) => index !== undefined); // Validator indices to include in the result diff --git a/packages/beacon-node/src/chain/rewards/blockRewards.ts b/packages/beacon-node/src/chain/rewards/blockRewards.ts index 67bd2f1265..3bdc791042 100644 --- a/packages/beacon-node/src/chain/rewards/blockRewards.ts +++ b/packages/beacon-node/src/chain/rewards/blockRewards.ts @@ -1,4 +1,5 @@ import {routes} from "@lodestar/api"; +import {BeaconConfig} from "@lodestar/config"; import { ForkName, WHISTLEBLOWER_REWARD_QUOTIENT, @@ -26,11 +27,12 @@ type SubRewardValue = number; // All reward values should be integer * 3) Reporting slashable behaviours from proposer and attester */ export async function computeBlockRewards( + config: BeaconConfig, block: BeaconBlock, preState: CachedBeaconStateAllForks, postState?: CachedBeaconStateAllForks ): Promise { - const fork = preState.config.getForkName(block.slot); + const fork = config.getForkName(block.slot); const {attestations: cachedAttestationsReward = 0, syncAggregate: cachedSyncAggregateReward = 0} = postState?.proposerRewards ?? {}; let blockAttestationReward = cachedAttestationsReward; @@ -40,7 +42,7 @@ export async function computeBlockRewards( blockAttestationReward = fork === ForkName.phase0 ? computeBlockAttestationRewardPhase0(block as phase0.BeaconBlock, preState as CachedBeaconStatePhase0) - : computeBlockAttestationRewardAltair(block as altair.BeaconBlock, preState as CachedBeaconStateAltair); + : computeBlockAttestationRewardAltair(config, block as altair.BeaconBlock, preState as CachedBeaconStateAltair); } if (syncAggregateReward === 0) { @@ -78,10 +80,11 @@ function computeBlockAttestationRewardPhase0( * Reuses `processAttestationsAltair()`. 
Has dependency on RewardCache */ function computeBlockAttestationRewardAltair( + config: BeaconConfig, block: altair.BeaconBlock, preState: CachedBeaconStateAltair ): SubRewardValue { - const fork = preState.config.getForkSeq(block.slot); + const fork = config.getForkSeq(block.slot); const {attestations} = block.body; processAttestationsAltair(fork, preState, attestations, false); diff --git a/packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts b/packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts index de3638a71e..caba088177 100644 --- a/packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts +++ b/packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts @@ -1,4 +1,5 @@ import {routes} from "@lodestar/api"; +import {BeaconConfig} from "@lodestar/config"; import {ForkName, SYNC_COMMITTEE_SIZE} from "@lodestar/params"; import {CachedBeaconStateAllForks, CachedBeaconStateAltair, Index2PubkeyCache} from "@lodestar/state-transition"; import {BeaconBlock, ValidatorIndex, altair} from "@lodestar/types"; @@ -7,12 +8,13 @@ export type SyncCommitteeRewards = routes.beacon.SyncCommitteeRewards; type BalanceRecord = {val: number}; // Use val for convenient way to increment/decrement balance export async function computeSyncCommitteeRewards( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, block: BeaconBlock, preState: CachedBeaconStateAllForks, validatorIds: (ValidatorIndex | string)[] = [] ): Promise { - const fork = preState.config.getForkName(block.slot); + const fork = config.getForkName(block.slot); if (fork === ForkName.phase0) { throw Error("Cannot get sync rewards as phase0 block does not have sync committee"); } diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts index b10841413b..86afbbf0fe 100644 --- a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -1,4 +1,5 @@ import {routes} from "@lodestar/api"; +import {BeaconConfig} from "@lodestar/config"; import { CachedBeaconStateAllForks, computeStartSlotAtEpoch, @@ -24,6 +25,7 @@ export type PersistentCheckpointStateCacheOpts = { }; type PersistentCheckpointStateCacheModules = { + config: BeaconConfig; metrics?: Metrics | null; logger: Logger; clock?: IClock | null; @@ -107,6 +109,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { private readonly cache: MapTracker; /** Epoch -> Set */ private readonly epochIndex = new MapDef>(() => new Set()); + private readonly config: BeaconConfig; private readonly metrics: Metrics | null | undefined; private readonly logger: Logger; private readonly clock: IClock | null | undefined; @@ -120,10 +123,20 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { private readonly bufferPool?: BufferPool | null; constructor( - {metrics, logger, clock, signal, datastore, blockStateCache, bufferPool}: PersistentCheckpointStateCacheModules, + { + config, + metrics, + logger, + clock, + signal, + datastore, + blockStateCache, + bufferPool, + }: PersistentCheckpointStateCacheModules, opts: PersistentCheckpointStateCacheOpts ) { this.cache = new MapTracker(metrics?.cpStateCache); + this.config = config; if (metrics) { this.metrics = metrics; metrics.cpStateCache.size.addCollect(() => { @@ -484,7 +497,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { } const blockSlot = state.slot; - 
const processCPStatesTimeMs = state.config.getSlotComponentDurationMs(PROCESS_CHECKPOINT_STATES_BPS); + const processCPStatesTimeMs = this.config.getSlotComponentDurationMs(PROCESS_CHECKPOINT_STATES_BPS); // we always have clock in production, fallback value is only for test const msFromSlot = this.clock?.msFromSlot(blockSlot) ?? processCPStatesTimeMs; const msToProcessCPStates = processCPStatesTimeMs - msFromSlot; diff --git a/packages/beacon-node/src/chain/validation/attesterSlashing.ts b/packages/beacon-node/src/chain/validation/attesterSlashing.ts index a672604007..abdca9a420 100644 --- a/packages/beacon-node/src/chain/validation/attesterSlashing.ts +++ b/packages/beacon-node/src/chain/validation/attesterSlashing.ts @@ -51,7 +51,7 @@ export async function validateAttesterSlashing( }); } - const signatureSets = getAttesterSlashingSignatureSets(chain.index2pubkey, state, attesterSlashing); + const signatureSets = getAttesterSlashingSignatureSets(chain.config, chain.index2pubkey, state, attesterSlashing); if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true, priority: prioritizeBls}))) { throw new AttesterSlashingError(GossipAction.REJECT, { code: AttesterSlashingErrorCode.INVALID, diff --git a/packages/beacon-node/src/chain/validation/block.ts b/packages/beacon-node/src/chain/validation/block.ts index b68f30d6b0..44fa18457c 100644 --- a/packages/beacon-node/src/chain/validation/block.ts +++ b/packages/beacon-node/src/chain/validation/block.ts @@ -153,7 +153,7 @@ export async function validateGossipBlock( // [REJECT] The proposer signature, signed_beacon_block.signature, is valid with respect to the proposer_index pubkey. if (!chain.seenBlockInputCache.isVerifiedProposerSignature(blockSlot, blockRoot, signedBlock.signature)) { - const signatureSet = getBlockProposerSignatureSet(chain.index2pubkey, blockState, signedBlock); + const signatureSet = getBlockProposerSignatureSet(chain.config, chain.index2pubkey, blockState, signedBlock); // Don't batch so verification is not delayed if (!(await chain.bls.verifySignatureSets([signatureSet], {verifyOnMainThread: true}))) { throw new BlockGossipError(GossipAction.REJECT, { diff --git a/packages/beacon-node/src/chain/validation/blsToExecutionChange.ts b/packages/beacon-node/src/chain/validation/blsToExecutionChange.ts index 53e8b6f5d2..6225fa979d 100644 --- a/packages/beacon-node/src/chain/validation/blsToExecutionChange.ts +++ b/packages/beacon-node/src/chain/validation/blsToExecutionChange.ts @@ -41,7 +41,7 @@ async function validateBlsToExecutionChange( // NOTE: No need to advance head state since the signature's fork is handled with `broadcastedOnFork`, // and chanes relevant to `isValidBlsToExecutionChange()` happen only on processBlock(), not processEpoch() const state = chain.getHeadState(); - const {config} = state; + const {config} = chain; // [REJECT] All of the conditions within process_bls_to_execution_change pass validation. 
// verifySignature = false, verified in batch below diff --git a/packages/beacon-node/src/chain/validation/proposerSlashing.ts b/packages/beacon-node/src/chain/validation/proposerSlashing.ts index 8825fabdbd..4d36295b75 100644 --- a/packages/beacon-node/src/chain/validation/proposerSlashing.ts +++ b/packages/beacon-node/src/chain/validation/proposerSlashing.ts @@ -44,7 +44,7 @@ async function validateProposerSlashing( }); } - const signatureSets = getProposerSlashingSignatureSets(chain.index2pubkey, state, proposerSlashing); + const signatureSets = getProposerSlashingSignatureSets(chain.config, chain.index2pubkey, state, proposerSlashing); if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true, priority: prioritizeBls}))) { throw new ProposerSlashingError(GossipAction.REJECT, { code: ProposerSlashingErrorCode.INVALID, diff --git a/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts b/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts index caf09d29c8..9726241c37 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts @@ -14,7 +14,7 @@ export function getAggregateAndProofSigningRoot( epoch: Epoch, aggregateAndProof: SignedAggregateAndProof ): Uint8Array { - // previously, we call `const aggregatorDomain = state.config.getDomain(state.slot, DOMAIN_AGGREGATE_AND_PROOF, slot);` + // previously, we call `const aggregatorDomain = config.getDomain(state.slot, DOMAIN_AGGREGATE_AND_PROOF, slot);` // at fork boundary, it's required to dial to target epoch https://github.com/ChainSafe/lodestar/blob/v1.11.3/packages/beacon-node/src/chain/validation/attestation.ts#L573 // instead of that, just use the fork of slot in the attestation data const slot = computeStartSlotAtEpoch(epoch); diff --git a/packages/beacon-node/src/chain/validation/signatureSets/contributionAndProof.ts b/packages/beacon-node/src/chain/validation/signatureSets/contributionAndProof.ts index d56febdda5..5495a08e8e 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/contributionAndProof.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/contributionAndProof.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_CONTRIBUTION_AND_PROOF} from "@lodestar/params"; import { CachedBeaconStateAllForks, @@ -9,11 +10,12 @@ import { import {altair, ssz} from "@lodestar/types"; export function getContributionAndProofSignatureSet( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedContributionAndProof: altair.SignedContributionAndProof ): ISignatureSet { - const domain = state.config.getDomain( + const domain = config.getDomain( state.slot, DOMAIN_CONTRIBUTION_AND_PROOF, signedContributionAndProof.message.contribution.slot diff --git a/packages/beacon-node/src/chain/validation/signatureSets/syncCommittee.ts b/packages/beacon-node/src/chain/validation/signatureSets/syncCommittee.ts index 489b92d2a6..f91dbd5556 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/syncCommittee.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/syncCommittee.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_SYNC_COMMITTEE} from "@lodestar/params"; import { CachedBeaconStateAllForks, @@ -9,11 +10,12 @@ import { import {altair, ssz} from "@lodestar/types"; export function getSyncCommitteeSignatureSet( + config: 
BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, syncCommittee: altair.SyncCommitteeMessage ): ISignatureSet { - const domain = state.config.getDomain(state.slot, DOMAIN_SYNC_COMMITTEE, syncCommittee.slot); + const domain = config.getDomain(state.slot, DOMAIN_SYNC_COMMITTEE, syncCommittee.slot); return { type: SignatureSetType.single, diff --git a/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeContribution.ts b/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeContribution.ts index f444c48a21..a105078e33 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeContribution.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeContribution.ts @@ -1,14 +1,16 @@ import {PublicKey} from "@chainsafe/blst"; +import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_SYNC_COMMITTEE} from "@lodestar/params"; import {CachedBeaconStateAltair, ISignatureSet, SignatureSetType, computeSigningRoot} from "@lodestar/state-transition"; import {altair, ssz} from "@lodestar/types"; export function getSyncCommitteeContributionSignatureSet( + config: BeaconConfig, state: CachedBeaconStateAltair, contribution: altair.SyncCommitteeContribution, pubkeys: PublicKey[] ): ISignatureSet { - const domain = state.config.getDomain(state.slot, DOMAIN_SYNC_COMMITTEE, contribution.slot); + const domain = config.getDomain(state.slot, DOMAIN_SYNC_COMMITTEE, contribution.slot); return { type: SignatureSetType.aggregate, pubkeys, diff --git a/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeSelectionProof.ts b/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeSelectionProof.ts index cda51529d4..8b93b4259c 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeSelectionProof.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/syncCommitteeSelectionProof.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF} from "@lodestar/params"; import { CachedBeaconStateAllForks, @@ -9,11 +10,11 @@ import { import {altair, ssz} from "@lodestar/types"; export function getSyncCommitteeSelectionProofSignatureSet( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, contributionAndProof: altair.ContributionAndProof ): ISignatureSet { - const {config} = state; const slot = contributionAndProof.contribution.slot; const domain = config.getDomain(state.slot, DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF, slot); const signingData: altair.SyncAggregatorSelectionData = { diff --git a/packages/beacon-node/src/chain/validation/syncCommittee.ts b/packages/beacon-node/src/chain/validation/syncCommittee.ts index 6995157bf1..c3606ca40f 100644 --- a/packages/beacon-node/src/chain/validation/syncCommittee.ts +++ b/packages/beacon-node/src/chain/validation/syncCommittee.ts @@ -89,7 +89,7 @@ async function validateSyncCommitteeSigOnly( syncCommittee: altair.SyncCommitteeMessage, prioritizeBls = false ): Promise { - const signatureSet = getSyncCommitteeSignatureSet(chain.index2pubkey, headState, syncCommittee); + const signatureSet = getSyncCommitteeSignatureSet(chain.config, chain.index2pubkey, headState, syncCommittee); if (!(await chain.bls.verifySignatureSets([signatureSet], {batchable: true, priority: prioritizeBls}))) { throw new SyncCommitteeError(GossipAction.REJECT, { code: SyncCommitteeErrorCode.INVALID_SIGNATURE, diff --git 
a/packages/beacon-node/src/chain/validation/syncCommitteeContributionAndProof.ts b/packages/beacon-node/src/chain/validation/syncCommitteeContributionAndProof.ts index e026b728c1..577c684f32 100644 --- a/packages/beacon-node/src/chain/validation/syncCommitteeContributionAndProof.ts +++ b/packages/beacon-node/src/chain/validation/syncCommitteeContributionAndProof.ts @@ -78,14 +78,19 @@ export async function validateSyncCommitteeGossipContributionAndProof( const signatureSets = [ // [REJECT] The contribution_and_proof.selection_proof is a valid signature of the SyncAggregatorSelectionData // derived from the contribution by the validator with index contribution_and_proof.aggregator_index. - getSyncCommitteeSelectionProofSignatureSet(index2pubkey, headState, contributionAndProof), + getSyncCommitteeSelectionProofSignatureSet(chain.config, index2pubkey, headState, contributionAndProof), // [REJECT] The aggregator signature, signed_contribution_and_proof.signature, is valid. - getContributionAndProofSignatureSet(index2pubkey, headState, signedContributionAndProof), + getContributionAndProofSignatureSet(chain.config, index2pubkey, headState, signedContributionAndProof), // [REJECT] The aggregate signature is valid for the message beacon_block_root and aggregate pubkey derived from // the participation info in aggregation_bits for the subcommittee specified by the contribution.subcommittee_index. - getSyncCommitteeContributionSignatureSet(headState as CachedBeaconStateAltair, contribution, participantPubkeys), + getSyncCommitteeContributionSignatureSet( + chain.config, + headState as CachedBeaconStateAltair, + contribution, + participantPubkeys + ), ]; if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true}))) { diff --git a/packages/beacon-node/src/chain/validation/voluntaryExit.ts b/packages/beacon-node/src/chain/validation/voluntaryExit.ts index 4c3f212e73..b72d9d6260 100644 --- a/packages/beacon-node/src/chain/validation/voluntaryExit.ts +++ b/packages/beacon-node/src/chain/validation/voluntaryExit.ts @@ -59,7 +59,7 @@ async function validateVoluntaryExit( }); } - const signatureSet = getVoluntaryExitSignatureSet(chain.index2pubkey, state, voluntaryExit); + const signatureSet = getVoluntaryExitSignatureSet(chain.config, chain.index2pubkey, state, voluntaryExit); if (!(await chain.bls.verifySignatureSets([signatureSet], {batchable: true, priority: prioritizeBls}))) { throw new VoluntaryExitError(GossipAction.REJECT, { code: VoluntaryExitErrorCode.INVALID_SIGNATURE, diff --git a/packages/beacon-node/src/sync/backfill/backfill.ts b/packages/beacon-node/src/sync/backfill/backfill.ts index e62827dca5..9018bf7e0a 100644 --- a/packages/beacon-node/src/sync/backfill/backfill.ts +++ b/packages/beacon-node/src/sync/backfill/backfill.ts @@ -750,9 +750,13 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} // GENESIS_SLOT doesn't has valid signature if (anchorBlock.message.slot === GENESIS_SLOT) return; - await verifyBlockProposerSignature(this.chain.index2pubkey, this.chain.bls, this.chain.getHeadState(), [ - anchorBlock, - ]); + await verifyBlockProposerSignature( + this.chain.config, + this.chain.index2pubkey, + this.chain.bls, + this.chain.getHeadState(), + [anchorBlock] + ); // We can write to the disk if this is ahead of prevFinalizedCheckpointBlock otherwise // we will need to go make checks on the top of sync loop before writing as it might @@ -818,6 +822,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} // If 
any of the block's proposer signature fail, we can't trust this peer at all if (verifiedBlocks.length > 0) { await verifyBlockProposerSignature( + this.chain.config, this.chain.index2pubkey, this.chain.bls, this.chain.getHeadState(), diff --git a/packages/beacon-node/src/sync/backfill/verify.ts b/packages/beacon-node/src/sync/backfill/verify.ts index a0e09e96ee..262f694125 100644 --- a/packages/beacon-node/src/sync/backfill/verify.ts +++ b/packages/beacon-node/src/sync/backfill/verify.ts @@ -46,6 +46,7 @@ export function verifyBlockSequence( } export async function verifyBlockProposerSignature( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, bls: IBlsVerifier, state: CachedBeaconStateAllForks, @@ -54,7 +55,8 @@ export async function verifyBlockProposerSignature( if (blocks.length === 1 && blocks[0].message.slot === GENESIS_SLOT) return; const signatures = blocks.reduce((sigs: ISignatureSet[], block) => { // genesis block doesn't have valid signature - if (block.message.slot !== GENESIS_SLOT) sigs.push(getBlockProposerSignatureSet(index2pubkey, state, block)); + if (block.message.slot !== GENESIS_SLOT) + sigs.push(getBlockProposerSignatureSet(config, index2pubkey, state, block)); return sigs; }, []); diff --git a/packages/beacon-node/test/mocks/mockedBeaconChain.ts b/packages/beacon-node/test/mocks/mockedBeaconChain.ts index 2aadb8d27f..5b049e2a1f 100644 --- a/packages/beacon-node/test/mocks/mockedBeaconChain.ts +++ b/packages/beacon-node/test/mocks/mockedBeaconChain.ts @@ -1,6 +1,6 @@ import {Mock, Mocked, vi} from "vitest"; import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; -import {ChainForkConfig} from "@lodestar/config"; +import {BeaconConfig, ChainForkConfig} from "@lodestar/config"; import {config as defaultConfig} from "@lodestar/config/default"; import {EpochDifference, ForkChoice, ProtoBlock} from "@lodestar/fork-choice"; import {Logger} from "@lodestar/utils"; @@ -133,8 +133,8 @@ vi.mock("../../src/chain/chain.js", async (importActual) => { getClientVersion: vi.fn(), }, executionBuilder: {}, - opPool: new OpPool(), - aggregatedAttestationPool: new AggregatedAttestationPool(config), + opPool: new OpPool(config as BeaconConfig), + aggregatedAttestationPool: new AggregatedAttestationPool(config as BeaconConfig), syncContributionAndProofPool: new SyncContributionAndProofPool(config, clock), // @ts-expect-error beaconProposerCache: new BeaconProposerCache(), diff --git a/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts b/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts index 08d0481d01..612e487620 100644 --- a/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts +++ b/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts @@ -1,6 +1,6 @@ import {beforeAll, bench, describe} from "@chainsafe/benchmark"; import {BitArray, toHexString} from "@chainsafe/ssz"; -import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; +import {createBeaconConfig, defaultChainConfig} from "@lodestar/config"; import {ExecutionStatus, ForkChoice, IForkChoiceStore, ProtoArray} from "@lodestar/fork-choice"; import {HISTORICAL_ROOTS_LIMIT, SLOTS_PER_EPOCH} from "@lodestar/params"; import { @@ -232,7 +232,7 @@ function getAggregatedAttestationPool( numMissedVotes: number, numBadVotes: number ): AggregatedAttestationPool { - const config = createChainForkConfig(defaultChainConfig); + const config = createBeaconConfig(defaultChainConfig, Buffer.alloc(32, 0xaa)); const pool 
= new AggregatedAttestationPool(config); for (let epochSlot = 0; epochSlot < SLOTS_PER_EPOCH; epochSlot++) { diff --git a/packages/beacon-node/test/perf/chain/opPools/opPool.test.ts b/packages/beacon-node/test/perf/chain/opPools/opPool.test.ts index 104ee51b77..b3f0bf44a3 100644 --- a/packages/beacon-node/test/perf/chain/opPools/opPool.test.ts +++ b/packages/beacon-node/test/perf/chain/opPools/opPool.test.ts @@ -1,4 +1,6 @@ import {beforeAll, bench, describe} from "@chainsafe/benchmark"; +import {createBeaconConfig} from "@lodestar/config"; +import {chainConfig as chainConfigDef} from "@lodestar/config/default"; import { ForkName, MAX_ATTESTER_SLASHINGS, @@ -20,6 +22,7 @@ import { describe("opPool", () => { let originalState: CachedBeaconStateAltair; + const config = createBeaconConfig(chainConfigDef, Buffer.alloc(32, 0xaa)); beforeAll( () => { @@ -31,7 +34,7 @@ describe("opPool", () => { bench({ id: "getSlashingsAndExits - default max", beforeEach: () => { - const pool = new OpPool(); + const pool = new OpPool(config); fillAttesterSlashing(pool, originalState, MAX_ATTESTER_SLASHINGS); fillProposerSlashing(pool, originalState, MAX_PROPOSER_SLASHINGS); fillVoluntaryExits(pool, originalState, MAX_VOLUNTARY_EXITS); @@ -48,7 +51,7 @@ describe("opPool", () => { bench({ id: "getSlashingsAndExits - 2k", beforeEach: () => { - const pool = new OpPool(); + const pool = new OpPool(config); const maxItemsInPool = 2_000; fillAttesterSlashing(pool, originalState, maxItemsInPool); diff --git a/packages/beacon-node/test/unit-minimal/chain/opPools/aggregatedAttestationPool.test.ts b/packages/beacon-node/test/unit-minimal/chain/opPools/aggregatedAttestationPool.test.ts index 249c9d1594..f27b1134f0 100644 --- a/packages/beacon-node/test/unit-minimal/chain/opPools/aggregatedAttestationPool.test.ts +++ b/packages/beacon-node/test/unit-minimal/chain/opPools/aggregatedAttestationPool.test.ts @@ -1,7 +1,8 @@ import {afterEach, beforeAll, beforeEach, describe, expect, it, vi} from "vitest"; import {SecretKey, Signature, aggregateSignatures, fastAggregateVerify} from "@chainsafe/blst"; import {BitArray, fromHexString, toHexString} from "@chainsafe/ssz"; -import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; +import {createBeaconConfig, createChainForkConfig} from "@lodestar/config"; +import {chainConfig as chainConfigDefault} from "@lodestar/config/default"; import { ACTIVE_PRESET, FAR_FUTURE_EPOCH, @@ -48,9 +49,6 @@ describe("AggregatedAttestationPool - Altair", () => { let pool: AggregatedAttestationPool; const fork = ForkName.altair; - const config = createChainForkConfig({ - ...defaultChainConfig, - }); const altairForkEpoch = 2020; const currentEpoch = altairForkEpoch + 10; const currentSlot = SLOTS_PER_EPOCH * currentEpoch; @@ -93,6 +91,10 @@ describe("AggregatedAttestationPool - Altair", () => { let altairState: CachedBeaconStateAllForks; let forkchoiceStub: MockedForkChoice; + const config = createBeaconConfig( + createChainForkConfig({...chainConfigDefault, ALTAIR_FORK_EPOCH: altairForkEpoch}), + originalState.genesisValidatorsRoot + ); beforeEach(() => { pool = new AggregatedAttestationPool(config); @@ -107,7 +109,7 @@ describe("AggregatedAttestationPool - Altair", () => { it("getNotSeenValidatorsFn", () => { // previousEpochParticipation and currentEpochParticipation is created inside generateCachedState // 0 and 1 are fully participated - const notSeenValidatorFn = getNotSeenValidatorsFn(altairState); + const notSeenValidatorFn = getNotSeenValidatorsFn(config, altairState); // seen 
attesting indices are 0, 1 => not seen are 2, 3 expect(notSeenValidatorFn(currentEpoch, currentSlot - 1, committeeIndex)).toEqual(new Set([2, 3])); // attestations in current slot are always included (since altairState.slot = currentSlot + 1) @@ -169,14 +171,15 @@ describe("AggregatedAttestationPool - get packed attestations - Electra", () => let pool: AggregatedAttestationPool; const fork = ForkName.electra; const electraForkEpoch = 2020; - const config = createChainForkConfig({ - ...defaultChainConfig, + const chainConfig = createChainForkConfig({ + ...chainConfigDefault, ALTAIR_FORK_EPOCH: 0, BELLATRIX_FORK_EPOCH: 0, CAPELLA_FORK_EPOCH: 0, DENEB_FORK_EPOCH: 0, ELECTRA_FORK_EPOCH: electraForkEpoch, }); + const config = createBeaconConfig(chainConfig, Buffer.alloc(32, 0xaa)); const currentEpoch = electraForkEpoch + 10; const currentSlot = SLOTS_PER_EPOCH * currentEpoch; @@ -393,9 +396,7 @@ describe("AggregatedAttestationPool - get packed attestations - Electra", () => }); describe("MatchingDataAttestationGroup.add()", () => { - const config = createChainForkConfig({ - ...defaultChainConfig, - }); + const config = createBeaconConfig(chainConfigDefault, Buffer.alloc(32, 0xaa)); const testCases: {id: string; attestationsToAdd: {bits: number[]; res: InsertOutcome; isKept: boolean}[]}[] = [ { @@ -465,9 +466,7 @@ describe("MatchingDataAttestationGroup.add()", () => { }); describe("MatchingDataAttestationGroup.getAttestationsForBlock", () => { - const config = createChainForkConfig({ - ...defaultChainConfig, - }); + const config = createBeaconConfig(chainConfigDefault, Buffer.alloc(32, 0xaa)); const maxAttestations = 2; const testCases: { diff --git a/packages/beacon-node/test/unit-minimal/chain/stateCache/persistentCheckpointsCache.test.ts b/packages/beacon-node/test/unit-minimal/chain/stateCache/persistentCheckpointsCache.test.ts index 2b4f127832..d3cfff7015 100644 --- a/packages/beacon-node/test/unit-minimal/chain/stateCache/persistentCheckpointsCache.test.ts +++ b/packages/beacon-node/test/unit-minimal/chain/stateCache/persistentCheckpointsCache.test.ts @@ -1,4 +1,6 @@ import {beforeAll, beforeEach, describe, expect, it} from "vitest"; +import {createBeaconConfig} from "@lodestar/config"; +import {chainConfig as chainConfigDef} from "@lodestar/config/default"; import {ACTIVE_PRESET, PresetName, SLOTS_PER_EPOCH, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; import {CachedBeaconStateAllForks, computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {RootHex, phase0} from "@lodestar/types"; @@ -27,6 +29,7 @@ describe("PersistentCheckpointStateCache", () => { let fileApisBuffer: Map; let states: Record<"cp0a" | "cp0b" | "cp1" | "cp2", CachedBeaconStateAllForks>; let stateBytes: Record<"cp0a" | "cp0b" | "cp1" | "cp2", Uint8Array>; + const config = createBeaconConfig(chainConfigDef, Buffer.alloc(32, 0xaa)); beforeAll(() => { root0a = Buffer.alloc(32); @@ -91,6 +94,7 @@ describe("PersistentCheckpointStateCache", () => { const datastore = getTestDatastore(fileApisBuffer); cache = new PersistentCheckpointStateCache( { + config, datastore, logger: testLogger(), blockStateCache: new FIFOBlockStateCache({}, {}), @@ -165,6 +169,7 @@ describe("PersistentCheckpointStateCache", () => { const datastore = getTestDatastore(fileApisBuffer); cache = new PersistentCheckpointStateCache( { + config, datastore, logger: testLogger(), blockStateCache: new FIFOBlockStateCache({}, {}), @@ -241,6 +246,7 @@ describe("PersistentCheckpointStateCache", () => { const datastore = 
getTestDatastore(fileApisBuffer); cache = new PersistentCheckpointStateCache( { + config, datastore, logger: testLogger(), blockStateCache: new FIFOBlockStateCache({}, {}), @@ -546,6 +552,7 @@ describe("PersistentCheckpointStateCache", () => { const datastore = getTestDatastore(fileApisBuffer); cache = new PersistentCheckpointStateCache( { + config, datastore, logger: testLogger(), blockStateCache: new FIFOBlockStateCache({}, {}), @@ -817,6 +824,7 @@ describe("PersistentCheckpointStateCache", () => { const datastore = getTestDatastore(fileApisBuffer); cache = new PersistentCheckpointStateCache( { + config, datastore, logger: testLogger(), blockStateCache: new FIFOBlockStateCache({}, {}), @@ -907,6 +915,7 @@ describe("PersistentCheckpointStateCache", () => { const datastore = getTestDatastore(fileApisBuffer); cache = new PersistentCheckpointStateCache( { + config, datastore, logger: testLogger(), blockStateCache: new FIFOBlockStateCache({}, {}), diff --git a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts index 71deb87f27..773e7930e4 100644 --- a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts +++ b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts @@ -32,7 +32,7 @@ describe("api/validator - produceBlockV3", () => { const config = createBeaconConfig(chainConfig, genesisValidatorsRoot); beforeEach(() => { - modules = getApiTestModules(); + modules = getApiTestModules({config}); api = getValidatorApi(defaultApiOptions, {...modules, config}); state = generateCachedBellatrixState(); diff --git a/packages/beacon-node/test/unit/chain/regen/regen.test.ts b/packages/beacon-node/test/unit/chain/regen/regen.test.ts index 59402b4629..941b9adcb8 100644 --- a/packages/beacon-node/test/unit/chain/regen/regen.test.ts +++ b/packages/beacon-node/test/unit/chain/regen/regen.test.ts @@ -1,4 +1,6 @@ import {beforeEach, describe, expect, it} from "vitest"; +import {createBeaconConfig} from "@lodestar/config"; +import {chainConfig as chainConfigDef} from "@lodestar/config/default"; import {SLOTS_PER_EPOCH, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; import {computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {RegenCaller} from "../../../../src/chain/regen/interface.js"; @@ -10,6 +12,7 @@ import {testLogger} from "../../../utils/logger.js"; import {generateCachedState} from "../../../utils/state.js"; describe("regen", () => { + const config = createBeaconConfig(chainConfigDef, Buffer.alloc(32, 0xaa)); // // epoch: 19 20 21 22 23 // |-----------|-----------|-----------|-----------| @@ -74,6 +77,7 @@ describe("regen", () => { beforeEach(() => { cache = new PersistentCheckpointStateCache( { + config, datastore, logger: testLogger(), blockStateCache: new FIFOBlockStateCache({}, {}), diff --git a/packages/beacon-node/test/unit/chain/rewards/blockRewards.test.ts b/packages/beacon-node/test/unit/chain/rewards/blockRewards.test.ts index 928f9b8d85..67502b2f00 100644 --- a/packages/beacon-node/test/unit/chain/rewards/blockRewards.test.ts +++ b/packages/beacon-node/test/unit/chain/rewards/blockRewards.test.ts @@ -1,4 +1,6 @@ import {describe, expect, it, vi} from "vitest"; +import {createBeaconConfig} from "@lodestar/config"; +import {chainConfig as chainConfigDef} from "@lodestar/config/default"; import {SYNC_COMMITTEE_SIZE} from "@lodestar/params"; import { CachedBeaconStateAllForks, @@ -15,6 +17,7 @@ import { import 
{computeBlockRewards} from "../../../../src/chain/rewards/blockRewards.js"; describe("chain / rewards / blockRewards", () => { + const config = createBeaconConfig({...chainConfigDef, ALTAIR_FORK_EPOCH: 0}, Buffer.alloc(32, 0xaa)); const testCases: {id: string; timeout?: number; opts: BlockAltairOpts}[] = [ { id: "Normal case", @@ -92,7 +95,11 @@ describe("chain / rewards / blockRewards", () => { // Populate tree root caches of the state state.hashTreeRoot(); cachedStateAltairPopulateCaches(state); - const calculatedBlockReward = await computeBlockRewards(block.message, state as CachedBeaconStateAllForks); + const calculatedBlockReward = await computeBlockRewards( + config, + block.message, + state as CachedBeaconStateAllForks + ); const {proposerIndex, total, attestations, syncAggregate, proposerSlashings, attesterSlashings} = calculatedBlockReward; @@ -153,6 +160,7 @@ describe("chain / rewards / blockRewards", () => { postState.proposerRewards = {attestations: 1000, syncAggregate: 1001, slashing: 1002}; const calculatedBlockReward = await computeBlockRewards( + config, block.message, preState as CachedBeaconStateAllForks, postState diff --git a/packages/beacon-node/test/unit/chain/validation/attesterSlashing.test.ts b/packages/beacon-node/test/unit/chain/validation/attesterSlashing.test.ts index 0753e79108..89c5f6de33 100644 --- a/packages/beacon-node/test/unit/chain/validation/attesterSlashing.test.ts +++ b/packages/beacon-node/test/unit/chain/validation/attesterSlashing.test.ts @@ -1,4 +1,6 @@ import {afterEach, beforeEach, describe, it, vi} from "vitest"; +import {createBeaconConfig} from "@lodestar/config"; +import {chainConfig as chainConfigDef} from "@lodestar/config/default"; import {phase0, ssz} from "@lodestar/types"; import {AttesterSlashingErrorCode} from "../../../../src/chain/errors/attesterSlashingError.js"; import {validateGossipAttesterSlashing} from "../../../../src/chain/validation/attesterSlashing.js"; @@ -9,9 +11,10 @@ import {generateCachedState} from "../../../utils/state.js"; describe("GossipMessageValidator", () => { let chainStub: MockedBeaconChain; let opPool: MockedBeaconChain["opPool"]; + const config = createBeaconConfig(chainConfigDef, Buffer.alloc(32, 0xaa)); beforeEach(() => { - chainStub = getMockedBeaconChain(); + chainStub = getMockedBeaconChain({config}); opPool = chainStub.opPool; const state = generateCachedState(); diff --git a/packages/beacon-node/test/unit/chain/validation/block.test.ts b/packages/beacon-node/test/unit/chain/validation/block.test.ts index 7bcee2c2c8..29defe4c28 100644 --- a/packages/beacon-node/test/unit/chain/validation/block.test.ts +++ b/packages/beacon-node/test/unit/chain/validation/block.test.ts @@ -1,6 +1,6 @@ import {Mock, Mocked, beforeEach, describe, it, vi} from "vitest"; -import {createChainForkConfig} from "@lodestar/config"; -import {config} from "@lodestar/config/default"; +import {createBeaconConfig, createChainForkConfig} from "@lodestar/config"; +import {config as configDef} from "@lodestar/config/default"; import {ProtoBlock} from "@lodestar/fork-choice"; import {ForkName, ForkPostDeneb, ForkPreFulu} from "@lodestar/params"; import {SignedBeaconBlock, ssz} from "@lodestar/types"; @@ -26,15 +26,16 @@ describe("gossip block validation", () => { const signature = EMPTY_SIGNATURE; const maxSkipSlots = 10; const denebConfig = createChainForkConfig({ - ...config, + ...configDef, ALTAIR_FORK_EPOCH: 0, BELLATRIX_FORK_EPOCH: 0, CAPELLA_FORK_EPOCH: 0, DENEB_FORK_EPOCH: 0, }); + const config = createBeaconConfig(configDef, 
Buffer.alloc(32, 0xaa)); beforeEach(() => { - chain = getMockedBeaconChain(); + chain = getMockedBeaconChain({config}); vi.spyOn(chain.clock, "currentSlotWithGossipDisparity", "get").mockReturnValue(clockSlot); forkChoice = chain.forkChoice; forkChoice.getBlockHex.mockReturnValue(null); diff --git a/packages/beacon-node/test/unit/chain/validation/blsToExecutionChange.test.ts b/packages/beacon-node/test/unit/chain/validation/blsToExecutionChange.test.ts index 240ef52c4d..e0f1155ac9 100644 --- a/packages/beacon-node/test/unit/chain/validation/blsToExecutionChange.test.ts +++ b/packages/beacon-node/test/unit/chain/validation/blsToExecutionChange.test.ts @@ -83,7 +83,7 @@ describe("validate bls to execution change", () => { const signedBlsToExecChange = {message: blsToExecutionChange, signature: wsk.sign(signingRoot).toBytes()}; beforeEach(() => { - chainStub = getMockedBeaconChain(); + chainStub = getMockedBeaconChain({config}); opPool = chainStub.opPool; vi.spyOn(chainStub, "getHeadState").mockReturnValue(state); vi.spyOn(chainStub, "getHeadStateAtCurrentEpoch"); diff --git a/packages/beacon-node/test/unit/chain/validation/proposerSlashing.test.ts b/packages/beacon-node/test/unit/chain/validation/proposerSlashing.test.ts index 8e7c6484b6..16bc28add7 100644 --- a/packages/beacon-node/test/unit/chain/validation/proposerSlashing.test.ts +++ b/packages/beacon-node/test/unit/chain/validation/proposerSlashing.test.ts @@ -1,4 +1,6 @@ import {afterEach, beforeEach, describe, it, vi} from "vitest"; +import {createBeaconConfig} from "@lodestar/config"; +import {chainConfig as chainConfigDef} from "@lodestar/config/default"; import {phase0, ssz} from "@lodestar/types"; import {ProposerSlashingErrorCode} from "../../../../src/chain/errors/proposerSlashingError.js"; import {validateGossipProposerSlashing} from "../../../../src/chain/validation/proposerSlashing.js"; @@ -9,9 +11,10 @@ import {generateCachedState} from "../../../utils/state.js"; describe("validate proposer slashing", () => { let chainStub: MockedBeaconChain; let opPool: MockedBeaconChain["opPool"]; + const config = createBeaconConfig(chainConfigDef, Buffer.alloc(32, 0xaa)); beforeEach(() => { - chainStub = getMockedBeaconChain(); + chainStub = getMockedBeaconChain({config}); opPool = chainStub.opPool; const state = generateCachedState(); diff --git a/packages/beacon-node/test/unit/chain/validation/syncCommittee.test.ts b/packages/beacon-node/test/unit/chain/validation/syncCommittee.test.ts index ef6ade2587..f7fd01d0c5 100644 --- a/packages/beacon-node/test/unit/chain/validation/syncCommittee.test.ts +++ b/packages/beacon-node/test/unit/chain/validation/syncCommittee.test.ts @@ -1,6 +1,6 @@ import {Mock, afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi} from "vitest"; import {toHexString} from "@chainsafe/ssz"; -import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; +import {createBeaconConfig, createChainForkConfig, defaultChainConfig} from "@lodestar/config"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {Epoch, Slot, altair} from "@lodestar/types"; import {SyncCommitteeErrorCode} from "../../../../src/chain/errors/syncCommitteeError.js"; @@ -20,7 +20,10 @@ describe("Sync Committee Signature validation", () => { let altairForkEpochBk: Epoch; const altairForkEpoch = 2020; const currentSlot = SLOTS_PER_EPOCH * (altairForkEpoch + 1); - const config = createChainForkConfig(Object.assign({}, defaultChainConfig, {ALTAIR_FORK_EPOCH: altairForkEpoch})); + const chainConfig = 
createChainForkConfig( + Object.assign({}, defaultChainConfig, {ALTAIR_FORK_EPOCH: altairForkEpoch}) + ); + const config = createBeaconConfig(chainConfig, Buffer.alloc(32, 0xaa)); // all validators have same pubkey const validatorIndexInSyncCommittee = 15; @@ -34,7 +37,7 @@ describe("Sync Committee Signature validation", () => { }); beforeEach(() => { - chain = getMockedBeaconChain(); + chain = getMockedBeaconChain({config}); ( chain as { seenSyncCommitteeMessages: SeenSyncCommitteeMessages; diff --git a/packages/beacon-node/test/unit/chain/validation/voluntaryExit.test.ts b/packages/beacon-node/test/unit/chain/validation/voluntaryExit.test.ts index 2be4b3256d..8398bf2543 100644 --- a/packages/beacon-node/test/unit/chain/validation/voluntaryExit.test.ts +++ b/packages/beacon-node/test/unit/chain/validation/voluntaryExit.test.ts @@ -1,7 +1,7 @@ import {afterEach, beforeAll, beforeEach, describe, it, vi} from "vitest"; import {SecretKey} from "@chainsafe/blst"; -import {createBeaconConfig} from "@lodestar/config"; -import {config} from "@lodestar/config/default"; +import {BeaconConfig, createBeaconConfig, createChainForkConfig} from "@lodestar/config"; +import {chainConfig} from "@lodestar/config/default"; import {DOMAIN_VOLUNTARY_EXIT, FAR_FUTURE_EPOCH, SLOTS_PER_EPOCH} from "@lodestar/params"; import { CachedBeaconStateAllForks, @@ -22,6 +22,7 @@ describe("validate voluntary exit", () => { let state: CachedBeaconStateAllForks; let signedVoluntaryExit: phase0.SignedVoluntaryExit; let opPool: MockedBeaconChain["opPool"]; + let config: BeaconConfig; beforeAll(() => { const sk = SecretKey.fromKeygen(Buffer.alloc(32)); @@ -29,7 +30,7 @@ describe("validate voluntary exit", () => { const stateEmpty = ssz.phase0.BeaconState.defaultValue(); // Validator has to be active for long enough - stateEmpty.slot = config.SHARD_COMMITTEE_PERIOD * SLOTS_PER_EPOCH; + stateEmpty.slot = chainConfig.SHARD_COMMITTEE_PERIOD * SLOTS_PER_EPOCH; // Add a validator that's active since genesis and ready to exit const validator = ssz.phase0.Validator.toViewDU({ @@ -55,13 +56,14 @@ describe("validate voluntary exit", () => { ); const signingRoot = computeSigningRoot(ssz.phase0.VoluntaryExit, voluntaryExit, domain); signedVoluntaryExit = {message: voluntaryExit, signature: sk.sign(signingRoot).toBytes()}; - const _state = generateState(stateEmpty, config); + const _state = generateState(stateEmpty, createChainForkConfig(chainConfig)); + config = createBeaconConfig(chainConfig, _state.genesisValidatorsRoot); - state = createCachedBeaconStateTest(_state, createBeaconConfig(config, _state.genesisValidatorsRoot)); + state = createCachedBeaconStateTest(_state, config); }); beforeEach(() => { - chainStub = getMockedBeaconChain(); + chainStub = getMockedBeaconChain({config}); opPool = chainStub.opPool; vi.spyOn(chainStub, "getHeadStateAtCurrentEpoch").mockResolvedValue(state); vi.spyOn(opPool, "hasSeenBlsToExecutionChange"); diff --git a/packages/state-transition/src/block/isValidIndexedAttestation.ts b/packages/state-transition/src/block/isValidIndexedAttestation.ts index e215f1631a..26b9db9192 100644 --- a/packages/state-transition/src/block/isValidIndexedAttestation.ts +++ b/packages/state-transition/src/block/isValidIndexedAttestation.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {ForkSeq, MAX_COMMITTEES_PER_SLOT, MAX_VALIDATORS_PER_COMMITTEE} from "@lodestar/params"; import {IndexedAttestation, IndexedAttestationBigint} from "@lodestar/types"; import {Index2PubkeyCache} from 
"../cache/pubkeyCache.js"; @@ -9,6 +10,7 @@ import {verifySignatureSet} from "../util/index.js"; * Check if `indexedAttestation` has sorted and unique indices and a valid aggregate signature. */ export function isValidIndexedAttestation( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, indexedAttestation: IndexedAttestation, @@ -19,12 +21,13 @@ export function isValidIndexedAttestation( } if (verifySignature) { - return verifySignatureSet(getIndexedAttestationSignatureSet(index2pubkey, state, indexedAttestation)); + return verifySignatureSet(getIndexedAttestationSignatureSet(config, index2pubkey, state, indexedAttestation)); } return true; } export function isValidIndexedAttestationBigint( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, indexedAttestation: IndexedAttestationBigint, @@ -35,7 +38,7 @@ export function isValidIndexedAttestationBigint( } if (verifySignature) { - return verifySignatureSet(getIndexedAttestationBigintSignatureSet(index2pubkey, state, indexedAttestation)); + return verifySignatureSet(getIndexedAttestationBigintSignatureSet(config, index2pubkey, state, indexedAttestation)); } return true; } diff --git a/packages/state-transition/src/block/processAttestationPhase0.ts b/packages/state-transition/src/block/processAttestationPhase0.ts index c5ab1a6f24..b50ce38c22 100644 --- a/packages/state-transition/src/block/processAttestationPhase0.ts +++ b/packages/state-transition/src/block/processAttestationPhase0.ts @@ -52,6 +52,7 @@ export function processAttestationPhase0( if ( !isValidIndexedAttestation( + state.config, epochCtx.index2pubkey, state, epochCtx.getIndexedAttestation(ForkSeq.phase0, attestation), diff --git a/packages/state-transition/src/block/processAttestationsAltair.ts b/packages/state-transition/src/block/processAttestationsAltair.ts index e467e9c79b..9218d22114 100644 --- a/packages/state-transition/src/block/processAttestationsAltair.ts +++ b/packages/state-transition/src/block/processAttestationsAltair.ts @@ -64,7 +64,13 @@ export function processAttestationsAltair( // TODO: Why should we verify an indexed attestation that we just created? If it's just for the signature // we can verify only that and nothing else. if (verifySignature) { - const sigSet = getAttestationWithIndicesSignatureSet(epochCtx.index2pubkey, state, attestation, attestingIndices); + const sigSet = getAttestationWithIndicesSignatureSet( + state.config, + epochCtx.index2pubkey, + state, + attestation, + attestingIndices + ); if (!verifySignatureSet(sigSet)) { throw new Error("Attestation signature is not valid"); } diff --git a/packages/state-transition/src/block/processAttesterSlashing.ts b/packages/state-transition/src/block/processAttesterSlashing.ts index 83766faf0f..91973d8ebb 100644 --- a/packages/state-transition/src/block/processAttesterSlashing.ts +++ b/packages/state-transition/src/block/processAttesterSlashing.ts @@ -55,7 +55,7 @@ export function assertValidAttesterSlashing( // be higher than the clock and the slashing would still be valid. Same applies to attestation data index, which // can be any arbitrary value. 
Must use bigint variants to hash correctly to all possible values for (const [i, attestation] of [attestation1, attestation2].entries()) { - if (!isValidIndexedAttestationBigint(index2pubkey, state, attestation, verifySignatures)) { + if (!isValidIndexedAttestationBigint(state.config, index2pubkey, state, attestation, verifySignatures)) { throw new Error(`AttesterSlashing attestation${i} is invalid`); } } diff --git a/packages/state-transition/src/block/processProposerSlashing.ts b/packages/state-transition/src/block/processProposerSlashing.ts index 8bfbc663c8..b51fcb08b5 100644 --- a/packages/state-transition/src/block/processProposerSlashing.ts +++ b/packages/state-transition/src/block/processProposerSlashing.ts @@ -77,7 +77,12 @@ export function assertValidProposerSlashing( // verify signatures if (verifySignatures) { - const signatureSets = getProposerSlashingSignatureSets(state.epochCtx.index2pubkey, state, proposerSlashing); + const signatureSets = getProposerSlashingSignatureSets( + state.config, + state.epochCtx.index2pubkey, + state, + proposerSlashing + ); for (let i = 0; i < signatureSets.length; i++) { if (!verifySignatureSet(signatureSets[i])) { throw new Error(`ProposerSlashing header${i + 1} signature invalid`); diff --git a/packages/state-transition/src/block/processRandao.ts b/packages/state-transition/src/block/processRandao.ts index f0640f9d6d..1f0ef25235 100644 --- a/packages/state-transition/src/block/processRandao.ts +++ b/packages/state-transition/src/block/processRandao.ts @@ -17,7 +17,7 @@ export function processRandao(state: CachedBeaconStateAllForks, block: BeaconBlo const randaoReveal = block.body.randaoReveal; // verify RANDAO reveal - if (verifySignature && !verifyRandaoSignature(epochCtx.index2pubkey, state, block)) { + if (verifySignature && !verifyRandaoSignature(state.config, epochCtx.index2pubkey, state, block)) { throw new Error("RANDAO reveal is an invalid signature"); } diff --git a/packages/state-transition/src/block/processSyncCommittee.ts b/packages/state-transition/src/block/processSyncCommittee.ts index 05495bfce8..b2dbe165a0 100644 --- a/packages/state-transition/src/block/processSyncCommittee.ts +++ b/packages/state-transition/src/block/processSyncCommittee.ts @@ -1,4 +1,5 @@ import {byteArrayEquals} from "@chainsafe/ssz"; +import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_SYNC_COMMITTEE, SYNC_COMMITTEE_SIZE} from "@lodestar/params"; import {altair, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; @@ -24,7 +25,13 @@ export function processSyncAggregate( if (verifySignatures) { // This is to conform to the spec - we want the signature to be verified const participantIndices = block.body.syncAggregate.syncCommitteeBits.intersectValues(committeeIndices); - const signatureSet = getSyncCommitteeSignatureSet(state.epochCtx.index2pubkey, state, block, participantIndices); + const signatureSet = getSyncCommitteeSignatureSet( + state.config, + state.epochCtx.index2pubkey, + state, + block, + participantIndices + ); // When there's no participation we consider the signature valid and just ignore i if (signatureSet !== null && !verifySignatureSet(signatureSet)) { throw Error("Sync committee signature invalid"); @@ -64,6 +71,7 @@ export function processSyncAggregate( } export function getSyncCommitteeSignatureSet( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, block: altair.BeaconBlock, @@ -107,7 +115,7 @@ export function getSyncCommitteeSignatureSet( throw 
Error("Empty sync committee signature is not infinity"); } - const domain = state.config.getDomain(state.slot, DOMAIN_SYNC_COMMITTEE, previousSlot); + const domain = config.getDomain(state.slot, DOMAIN_SYNC_COMMITTEE, previousSlot); return { type: SignatureSetType.aggregate, diff --git a/packages/state-transition/src/block/processVoluntaryExit.ts b/packages/state-transition/src/block/processVoluntaryExit.ts index ec2892ba03..c3d1ce860b 100644 --- a/packages/state-transition/src/block/processVoluntaryExit.ts +++ b/packages/state-transition/src/block/processVoluntaryExit.ts @@ -74,7 +74,10 @@ export function getVoluntaryExitValidity( return VoluntaryExitValidity.pendingWithdrawals; } - if (verifySignature && !verifyVoluntaryExitSignature(epochCtx.index2pubkey, state, signedVoluntaryExit)) { + if ( + verifySignature && + !verifyVoluntaryExitSignature(state.config, epochCtx.index2pubkey, state, signedVoluntaryExit) + ) { return VoluntaryExitValidity.invalidSignature; } diff --git a/packages/state-transition/src/cache/stateCache.ts b/packages/state-transition/src/cache/stateCache.ts index 4e1093905d..231a921d96 100644 --- a/packages/state-transition/src/cache/stateCache.ts +++ b/packages/state-transition/src/cache/stateCache.ts @@ -17,6 +17,7 @@ import { } from "./types.js"; export type BeaconStateCache = { + /** @deprecated should not access config outside of state-transition package */ config: BeaconConfig; epochCtx: EpochCache; /** Count of clones created from this BeaconStateCache instance. readonly to prevent accidental usage downstream */ diff --git a/packages/state-transition/src/signatureSets/attesterSlashings.ts b/packages/state-transition/src/signatureSets/attesterSlashings.ts index 855a3cb590..335b5717ad 100644 --- a/packages/state-transition/src/signatureSets/attesterSlashings.ts +++ b/packages/state-transition/src/signatureSets/attesterSlashings.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; import {AttesterSlashing, IndexedAttestationBigint, SignedBeaconBlock, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; @@ -6,33 +7,36 @@ import {ISignatureSet, SignatureSetType, computeSigningRoot, computeStartSlotAtE /** Get signature sets from all AttesterSlashing objects in a block */ export function getAttesterSlashingsSignatureSets( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): ISignatureSet[] { return signedBlock.message.body.attesterSlashings.flatMap((attesterSlashing) => - getAttesterSlashingSignatureSets(index2pubkey, state, attesterSlashing) + getAttesterSlashingSignatureSets(config, index2pubkey, state, attesterSlashing) ); } /** Get signature sets from a single AttesterSlashing object */ export function getAttesterSlashingSignatureSets( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, attesterSlashing: AttesterSlashing ): ISignatureSet[] { return [attesterSlashing.attestation1, attesterSlashing.attestation2].map((attestation) => - getIndexedAttestationBigintSignatureSet(index2pubkey, state, attestation) + getIndexedAttestationBigintSignatureSet(config, index2pubkey, state, attestation) ); } export function getIndexedAttestationBigintSignatureSet( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, indexedAttestation: IndexedAttestationBigint ): ISignatureSet { const slot = 
computeStartSlotAtEpoch(Number(indexedAttestation.data.target.epoch as bigint)); - const domain = state.config.getDomain(state.slot, DOMAIN_BEACON_ATTESTER, slot); + const domain = config.getDomain(state.slot, DOMAIN_BEACON_ATTESTER, slot); return { type: SignatureSetType.aggregate, diff --git a/packages/state-transition/src/signatureSets/index.ts b/packages/state-transition/src/signatureSets/index.ts index d2c51a080c..3d3ad0254d 100644 --- a/packages/state-transition/src/signatureSets/index.ts +++ b/packages/state-transition/src/signatureSets/index.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {ForkSeq} from "@lodestar/params"; import {IndexedAttestation, SignedBeaconBlock, altair, capella} from "@lodestar/types"; import {getSyncCommitteeSignatureSet} from "../block/processSyncCommittee.js"; @@ -26,6 +27,7 @@ export * from "./voluntaryExits.js"; * Deposits are not included because they can legally have invalid signatures. */ export function getBlockSignatureSets( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock, @@ -39,20 +41,21 @@ export function getBlockSignatureSets( const fork = state.config.getForkSeq(signedBlock.message.slot); const signatureSets = [ - getRandaoRevealSignatureSet(index2pubkey, state, signedBlock.message), - ...getProposerSlashingsSignatureSets(index2pubkey, state, signedBlock), - ...getAttesterSlashingsSignatureSets(index2pubkey, state, signedBlock), - ...getAttestationsSignatureSets(index2pubkey, state, signedBlock, indexedAttestations), - ...getVoluntaryExitsSignatureSets(index2pubkey, state, signedBlock), + getRandaoRevealSignatureSet(config, index2pubkey, state, signedBlock.message), + ...getProposerSlashingsSignatureSets(config, index2pubkey, state, signedBlock), + ...getAttesterSlashingsSignatureSets(config, index2pubkey, state, signedBlock), + ...getAttestationsSignatureSets(config, index2pubkey, state, signedBlock, indexedAttestations), + ...getVoluntaryExitsSignatureSets(config, index2pubkey, state, signedBlock), ]; if (!opts?.skipProposerSignature) { - signatureSets.push(getBlockProposerSignatureSet(index2pubkey, state, signedBlock)); + signatureSets.push(getBlockProposerSignatureSet(config, index2pubkey, state, signedBlock)); } // Only after altair fork, validate tSyncCommitteeSignature if (fork >= ForkSeq.altair) { const syncCommitteeSignatureSet = getSyncCommitteeSignatureSet( + config, index2pubkey, state as CachedBeaconStateAltair, (signedBlock as altair.SignedBeaconBlock).message diff --git a/packages/state-transition/src/signatureSets/indexedAttestation.ts b/packages/state-transition/src/signatureSets/indexedAttestation.ts index 3ba35a501c..f3a9724736 100644 --- a/packages/state-transition/src/signatureSets/indexedAttestation.ts +++ b/packages/state-transition/src/signatureSets/indexedAttestation.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; import {IndexedAttestation, SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; @@ -10,16 +11,18 @@ import { } from "../util/index.js"; export function getAttestationDataSigningRoot( + config: BeaconConfig, state: CachedBeaconStateAllForks, data: phase0.AttestationData ): Uint8Array { const slot = computeStartSlotAtEpoch(data.target.epoch); - const domain = state.config.getDomain(state.slot, DOMAIN_BEACON_ATTESTER, slot); + const domain = config.getDomain(state.slot, 
DOMAIN_BEACON_ATTESTER, slot); return computeSigningRoot(ssz.phase0.AttestationData, data, domain); } export function getAttestationWithIndicesSignatureSet( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, attestation: Pick, @@ -27,17 +30,19 @@ export function getAttestationWithIndicesSignatureSet( ): ISignatureSet { return createAggregateSignatureSetFromComponents( attestingIndices.map((i) => index2pubkey[i]), - getAttestationDataSigningRoot(state, attestation.data), + getAttestationDataSigningRoot(config, state, attestation.data), attestation.signature ); } export function getIndexedAttestationSignatureSet( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, indexedAttestation: IndexedAttestation ): ISignatureSet { return getAttestationWithIndicesSignatureSet( + config, index2pubkey, state, indexedAttestation, @@ -46,6 +51,7 @@ export function getIndexedAttestationSignatureSet( } export function getAttestationsSignatureSets( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock, @@ -57,6 +63,6 @@ export function getAttestationsSignatureSets( ); } return indexedAttestations.map((indexedAttestation) => - getIndexedAttestationSignatureSet(index2pubkey, state, indexedAttestation) + getIndexedAttestationSignatureSet(config, index2pubkey, state, indexedAttestation) ); } diff --git a/packages/state-transition/src/signatureSets/proposer.ts b/packages/state-transition/src/signatureSets/proposer.ts index ec518d6ac2..3ffd346d31 100644 --- a/packages/state-transition/src/signatureSets/proposer.ts +++ b/packages/state-transition/src/signatureSets/proposer.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_BEACON_PROPOSER} from "@lodestar/params"; import {SignedBeaconBlock, SignedBlindedBeaconBlock, Slot, isBlindedBeaconBlock, phase0, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; @@ -6,20 +7,21 @@ import {computeSigningRoot} from "../util/index.js"; import {ISignatureSet, SignatureSetType, verifySignatureSet} from "../util/signatureSets.js"; export function verifyProposerSignature( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock | SignedBlindedBeaconBlock ): boolean { - const signatureSet = getBlockProposerSignatureSet(index2pubkey, state, signedBlock); + const signatureSet = getBlockProposerSignatureSet(config, index2pubkey, state, signedBlock); return verifySignatureSet(signatureSet); } export function getBlockProposerSignatureSet( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock | SignedBlindedBeaconBlock ): ISignatureSet { - const {config} = state; const domain = config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, signedBlock.message.slot); const blockType = isBlindedBeaconBlock(signedBlock.message) diff --git a/packages/state-transition/src/signatureSets/proposerSlashings.ts b/packages/state-transition/src/signatureSets/proposerSlashings.ts index ab0294ca9f..218eef8d2d 100644 --- a/packages/state-transition/src/signatureSets/proposerSlashings.ts +++ b/packages/state-transition/src/signatureSets/proposerSlashings.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_BEACON_PROPOSER} from "@lodestar/params"; import {SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from 
"../cache/pubkeyCache.js"; @@ -8,6 +9,7 @@ import {ISignatureSet, SignatureSetType, computeSigningRoot} from "../util/index * Extract signatures to allow validating all block signatures at once */ export function getProposerSlashingSignatureSets( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, proposerSlashing: phase0.ProposerSlashing @@ -17,11 +19,7 @@ export function getProposerSlashingSignatureSets( // In state transition, ProposerSlashing headers are only partially validated. Their slot could be higher than the // clock and the slashing would still be valid. Must use bigint variants to hash correctly to all possible values return [proposerSlashing.signedHeader1, proposerSlashing.signedHeader2].map((signedHeader): ISignatureSet => { - const domain = state.config.getDomain( - state.slot, - DOMAIN_BEACON_PROPOSER, - Number(signedHeader.message.slot as bigint) - ); + const domain = config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, Number(signedHeader.message.slot as bigint)); return { type: SignatureSetType.single, @@ -33,11 +31,12 @@ export function getProposerSlashingSignatureSets( } export function getProposerSlashingsSignatureSets( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): ISignatureSet[] { return signedBlock.message.body.proposerSlashings.flatMap((proposerSlashing) => - getProposerSlashingSignatureSets(index2pubkey, state, proposerSlashing) + getProposerSlashingSignatureSets(config, index2pubkey, state, proposerSlashing) ); } diff --git a/packages/state-transition/src/signatureSets/randao.ts b/packages/state-transition/src/signatureSets/randao.ts index 5a538093cc..3b6dc46376 100644 --- a/packages/state-transition/src/signatureSets/randao.ts +++ b/packages/state-transition/src/signatureSets/randao.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_RANDAO} from "@lodestar/params"; import {BeaconBlock, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; @@ -11,24 +12,26 @@ import { } from "../util/index.js"; export function verifyRandaoSignature( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, block: BeaconBlock ): boolean { - return verifySignatureSet(getRandaoRevealSignatureSet(index2pubkey, state, block)); + return verifySignatureSet(getRandaoRevealSignatureSet(config, index2pubkey, state, block)); } /** * Extract signatures to allow validating all block signatures at once */ export function getRandaoRevealSignatureSet( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, block: BeaconBlock ): ISignatureSet { // should not get epoch from epochCtx const epoch = computeEpochAtSlot(block.slot); - const domain = state.config.getDomain(state.slot, DOMAIN_RANDAO, block.slot); + const domain = config.getDomain(state.slot, DOMAIN_RANDAO, block.slot); return { type: SignatureSetType.single, diff --git a/packages/state-transition/src/signatureSets/voluntaryExits.ts b/packages/state-transition/src/signatureSets/voluntaryExits.ts index e80c6e3a4c..b7d6e82b01 100644 --- a/packages/state-transition/src/signatureSets/voluntaryExits.ts +++ b/packages/state-transition/src/signatureSets/voluntaryExits.ts @@ -1,3 +1,4 @@ +import {BeaconConfig} from "@lodestar/config"; import {SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; import {CachedBeaconStateAllForks} from 
"../types.js"; @@ -10,23 +11,25 @@ import { } from "../util/index.js"; export function verifyVoluntaryExitSignature( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedVoluntaryExit: phase0.SignedVoluntaryExit ): boolean { - return verifySignatureSet(getVoluntaryExitSignatureSet(index2pubkey, state, signedVoluntaryExit)); + return verifySignatureSet(getVoluntaryExitSignatureSet(config, index2pubkey, state, signedVoluntaryExit)); } /** * Extract signatures to allow validating all block signatures at once */ export function getVoluntaryExitSignatureSet( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedVoluntaryExit: phase0.SignedVoluntaryExit ): ISignatureSet { const slot = computeStartSlotAtEpoch(signedVoluntaryExit.message.epoch); - const domain = state.config.getDomainForVoluntaryExit(state.slot, slot); + const domain = config.getDomainForVoluntaryExit(state.slot, slot); return { type: SignatureSetType.single, @@ -37,11 +40,12 @@ export function getVoluntaryExitSignatureSet( } export function getVoluntaryExitsSignatureSets( + config: BeaconConfig, index2pubkey: Index2PubkeyCache, state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): ISignatureSet[] { return signedBlock.message.body.voluntaryExits.map((voluntaryExit) => - getVoluntaryExitSignatureSet(index2pubkey, state, voluntaryExit) + getVoluntaryExitSignatureSet(config, index2pubkey, state, voluntaryExit) ); } diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts index 53ff5668c3..3f2bc6147d 100644 --- a/packages/state-transition/src/stateTransition.ts +++ b/packages/state-transition/src/stateTransition.ts @@ -111,7 +111,10 @@ export function stateTransition( postState = processSlotsWithTransientCache(postState, blockSlot, options, {metrics, validatorMonitor}); // Verify proposer signature only - if (verifyProposer && !verifyProposerSignature(postState.epochCtx.index2pubkey, postState, signedBlock)) { + if ( + verifyProposer && + !verifyProposerSignature(postState.config, postState.epochCtx.index2pubkey, postState, signedBlock) + ) { throw new Error("Invalid block signature"); } diff --git a/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts b/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts index ac69dd21f6..9c48d8db47 100644 --- a/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts +++ b/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts @@ -45,7 +45,7 @@ describe("validate indexed attestation", () => { data: attestationData, signature: EMPTY_SIGNATURE, }; - expect(isValidIndexedAttestation(state.epochCtx.index2pubkey, state, indexedAttestation, false)).toBe( + expect(isValidIndexedAttestation(state.config, state.epochCtx.index2pubkey, state, indexedAttestation, false)).toBe( expectedValue ); }); diff --git a/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts b/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts index 00f2cce4d1..6fb825bb41 100644 --- a/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts +++ b/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts @@ -70,7 +70,13 @@ describe("signatureSets", () => { state.epochCtx.getIndexedAttestation(fork, attestation) ); - const signatureSets = getBlockSignatureSets(state.epochCtx.index2pubkey, state, signedBlock, indexedAttestations); + 
const signatureSets = getBlockSignatureSets( + state.config, + state.epochCtx.index2pubkey, + state, + signedBlock, + indexedAttestations + ); expect(signatureSets.length).toBe( // block signature 1 + From 882891d89c122fd90125fc6c76e598beba94d631 Mon Sep 17 00:00:00 2001 From: Chiemerie Ezechukwu <50174206+chiemerieezechukwu@users.noreply.github.com> Date: Thu, 18 Dec 2025 23:59:39 +0100 Subject: [PATCH 11/20] chore: update bootnode ENR with correct IPV6 (#8705) **Motivation** This PR is to fix https://github.com/eth-clients/mainnet/issues/13 Already created this as well https://github.com/eth-clients/mainnet/pull/14 Closes #issue_number **AI Assistance Disclosure** - [ ] External Contributors: I have read the [contributor guidelines](https://github.com/ChainSafe/lodestar/blob/unstable/CONTRIBUTING.md#ai-assistance-notice) and disclosed my usage of AI below. --- packages/cli/src/networks/hoodi.ts | 1 + packages/cli/src/networks/mainnet.ts | 2 +- packages/cli/test/unit/util/parseBootnodesFile.test.ts | 4 ++-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/cli/src/networks/hoodi.ts b/packages/cli/src/networks/hoodi.ts index b962bf3e47..3b2ae8bb56 100644 --- a/packages/cli/src/networks/hoodi.ts +++ b/packages/cli/src/networks/hoodi.ts @@ -12,4 +12,5 @@ export const bootEnrs = [ "enr:-Ku4QIC89sMC0o-irosD4_23lJJ4qCGOvdUz7SmoShWx0k6AaxCFTKviEHa-sa7-EzsiXpDp0qP0xzX6nKdXJX3X-IQBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpBd9cEGEAAJEP__________gmlkgnY0gmlwhIbRilSJc2VjcDI1NmsxoQK_m0f1DzDc9Cjrspm36zuRa7072HSiMGYWLsKiVSbP34N1ZHCCIyk", "enr:-Ku4QNkWjw5tNzo8DtWqKm7CnDdIq_y7xppD6c1EZSwjB8rMOkSFA1wJPLoKrq5UvA7wcxIotH6Usx3PAugEN2JMncIBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpBd9cEGEAAJEP__________gmlkgnY0gmlwhIbHuBeJc2VjcDI1NmsxoQP3FwrhFYB60djwRjAoOjttq6du94DtkQuaN99wvgqaIYN1ZHCCIyk", "enr:-OS4QMJGE13xEROqvKN1xnnt7U-noc51VXyM6wFMuL9LMhQDfo1p1dF_zFdS4OsnXz_vIYk-nQWnqJMWRDKvkSK6_CwDh2F0dG5ldHOIAAAAADAAAACGY2xpZW502IpMaWdodGhvdXNljDcuMC4wLWJldGEuM4RldGgykNLxmX9gAAkQAAgAAAAAAACCaWSCdjSCaXCEhse4F4RxdWljgiMqiXNlY3AyNTZrMaECef77P8k5l3PC_raLw42OAzdXfxeQ-58BJriNaqiRGJSIc3luY25ldHMAg3RjcIIjKIN1ZHCCIyg", + "enr:-KG4QKRSUi4IOAIK_xt5ERrwW_J47wmNCLWFh7Jo0hFE69drZsiZ5Pb5CEcM_njFTTLlIR6SCf67HTcSV1g6hCXdhWkCgmlkgnY0gmlwhLkvrBODaXA2kCoGxcAWAAAYAAAAAAAAABCJc2VjcDI1NmsxoQPU7g2jQGTz8BYbB2vLTb39S_PrcZAehwMM0b3bWsM5rIN1ZHCCIyiEdWRwNoIjKA", ]; diff --git a/packages/cli/src/networks/mainnet.ts b/packages/cli/src/networks/mainnet.ts index c0cc965c6f..874c1e6d31 100644 --- a/packages/cli/src/networks/mainnet.ts +++ b/packages/cli/src/networks/mainnet.ts @@ -29,5 +29,5 @@ export const bootEnrs = [ "enr:-LK4QKWrXTpV9T78hNG6s8AM6IO4XH9kFT91uZtFg1GcsJ6dKovDOr1jtAAFPnS2lvNltkOGA9k29BUN7lFh_sjuc9QBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpC1MD8qAAAAAP__________gmlkgnY0gmlwhANAdd-Jc2VjcDI1NmsxoQLQa6ai7y9PMN5hpLe5HmiJSlYzMuzP7ZhwRiwHvqNXdoN0Y3CCI4yDdWRwgiOM", // Lodestar team's bootnodes "enr:-IS4QPi-onjNsT5xAIAenhCGTDl4z-4UOR25Uq-3TmG4V3kwB9ljLTb_Kp1wdjHNj-H8VVLRBSSWVZo3GUe3z6k0E-IBgmlkgnY0gmlwhKB3_qGJc2VjcDI1NmsxoQMvAfgB4cJXvvXeM6WbCG86CstbSxbQBSGx31FAwVtOTYN1ZHCCIyg", - "enr:-KG4QCb8NC3gEM3I0okStV5BPX7Bg6ZXTYCzzbYyEXUPGcZtHmvQtiJH4C4F2jG7azTcb9pN3JlgpfxAnRVFzJ3-LykBgmlkgnY0gmlwhFPlR9KDaXA2kP6AAAAAAAAAAlBW__4my5iJc2VjcDI1NmsxoQLdUv9Eo9sxCt0tc_CheLOWnX59yHJtkBSOL7kpxdJ6GYN1ZHCCIyiEdWRwNoIjKA", + "enr:-KG4QPUf8-g_jU-KrwzG42AGt0wWM1BTnQxgZXlvCEIfTQ5hSmptkmgmMbRkpOqv6kzb33SlhPHJp7x4rLWWiVq5lSECgmlkgnY0gmlwhFPlR9KDaXA2kCoGxcAJAAAVAAAAAAAAABCJc2VjcDI1NmsxoQLdUv9Eo9sxCt0tc_CheLOWnX59yHJtkBSOL7kpxdJ6GYN1ZHCCIyiEdWRwNoIjKA", ]; diff --git 
a/packages/cli/test/unit/util/parseBootnodesFile.test.ts b/packages/cli/test/unit/util/parseBootnodesFile.test.ts index 8d7045e594..3ddc74245d 100644 --- a/packages/cli/test/unit/util/parseBootnodesFile.test.ts +++ b/packages/cli/test/unit/util/parseBootnodesFile.test.ts @@ -82,7 +82,7 @@ describe("config / bootnodes / parsing", () => { # Lodestar team's bootnodes - enr:-IS4QPi-onjNsT5xAIAenhCGTDl4z-4UOR25Uq-3TmG4V3kwB9ljLTb_Kp1wdjHNj-H8VVLRBSSWVZo3GUe3z6k0E-IBgmlkgnY0gmlwhKB3_qGJc2VjcDI1NmsxoQMvAfgB4cJXvvXeM6WbCG86CstbSxbQBSGx31FAwVtOTYN1ZHCCIyg # 160.119.254.161 | hostafrica-southafrica -- enr:-KG4QCb8NC3gEM3I0okStV5BPX7Bg6ZXTYCzzbYyEXUPGcZtHmvQtiJH4C4F2jG7azTcb9pN3JlgpfxAnRVFzJ3-LykBgmlkgnY0gmlwhFPlR9KDaXA2kP6AAAAAAAAAAlBW__4my5iJc2VjcDI1NmsxoQLdUv9Eo9sxCt0tc_CheLOWnX59yHJtkBSOL7kpxdJ6GYN1ZHCCIyiEdWRwNoIjKA # 83.229.71.210 | kamatera-telaviv-israel +- enr:-KG4QPUf8-g_jU-KrwzG42AGt0wWM1BTnQxgZXlvCEIfTQ5hSmptkmgmMbRkpOqv6kzb33SlhPHJp7x4rLWWiVq5lSECgmlkgnY0gmlwhFPlR9KDaXA2kCoGxcAJAAAVAAAAAAAAABCJc2VjcDI1NmsxoQLdUv9Eo9sxCt0tc_CheLOWnX59yHJtkBSOL7kpxdJ6GYN1ZHCCIyiEdWRwNoIjKA # 83.229.71.210 | kamatera-telaviv-israel `, expected: [ "enr:-Iu4QLm7bZGdAt9NSeJG0cEnJohWcQTQaI9wFLu3Q7eHIDfrI4cwtzvEW3F3VbG9XdFXlrHyFGeXPn9snTCQJ9bnMRABgmlkgnY0gmlwhAOTJQCJc2VjcDI1NmsxoQIZdZD6tDYpkpEfVo5bgiU8MGRjhcOmHGD2nErK0UKRrIN0Y3CCIyiDdWRwgiMo", @@ -101,7 +101,7 @@ describe("config / bootnodes / parsing", () => { "enr:-LK4QA8FfhaAjlb_BXsXxSfiysR7R52Nhi9JBt4F8SPssu8hdE1BXQQEtVDC3qStCW60LSO7hEsVHv5zm8_6Vnjhcn0Bh2F0dG5ldHOIAAAAAAAAAACEZXRoMpC1MD8qAAAAAP__________gmlkgnY0gmlwhAN4aBKJc2VjcDI1NmsxoQJerDhsJ-KxZ8sHySMOCmTO6sHM3iCFQ6VMvLTe948MyYN0Y3CCI4yDdWRwgiOM", "enr:-LK4QKWrXTpV9T78hNG6s8AM6IO4XH9kFT91uZtFg1GcsJ6dKovDOr1jtAAFPnS2lvNltkOGA9k29BUN7lFh_sjuc9QBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpC1MD8qAAAAAP__________gmlkgnY0gmlwhANAdd-Jc2VjcDI1NmsxoQLQa6ai7y9PMN5hpLe5HmiJSlYzMuzP7ZhwRiwHvqNXdoN0Y3CCI4yDdWRwgiOM", "enr:-IS4QPi-onjNsT5xAIAenhCGTDl4z-4UOR25Uq-3TmG4V3kwB9ljLTb_Kp1wdjHNj-H8VVLRBSSWVZo3GUe3z6k0E-IBgmlkgnY0gmlwhKB3_qGJc2VjcDI1NmsxoQMvAfgB4cJXvvXeM6WbCG86CstbSxbQBSGx31FAwVtOTYN1ZHCCIyg", - "enr:-KG4QCb8NC3gEM3I0okStV5BPX7Bg6ZXTYCzzbYyEXUPGcZtHmvQtiJH4C4F2jG7azTcb9pN3JlgpfxAnRVFzJ3-LykBgmlkgnY0gmlwhFPlR9KDaXA2kP6AAAAAAAAAAlBW__4my5iJc2VjcDI1NmsxoQLdUv9Eo9sxCt0tc_CheLOWnX59yHJtkBSOL7kpxdJ6GYN1ZHCCIyiEdWRwNoIjKA", + "enr:-KG4QPUf8-g_jU-KrwzG42AGt0wWM1BTnQxgZXlvCEIfTQ5hSmptkmgmMbRkpOqv6kzb33SlhPHJp7x4rLWWiVq5lSECgmlkgnY0gmlwhFPlR9KDaXA2kCoGxcAJAAAVAAAAAAAAABCJc2VjcDI1NmsxoQLdUv9Eo9sxCt0tc_CheLOWnX59yHJtkBSOL7kpxdJ6GYN1ZHCCIyiEdWRwNoIjKA", ], }, { From 84b481ddb5f865cd30c12ff98658b5c7b84cb9ab Mon Sep 17 00:00:00 2001 From: Nico Flaig Date: Fri, 19 Dec 2025 00:00:34 +0100 Subject: [PATCH 12/20] chore: restore code required to perform sync through bellatrix (#8700) **Motivation** As noted in https://github.com/ChainSafe/lodestar/pull/8680#discussion_r2624026653 we cannot sync through bellatrix anymore. While I don't think it's a big deal it's simple enough to keep that functionality as that code is pretty isolated and won't get in our way during refactors and with gloas won't be part of the block processing pipeline anymore due to block/payload separation. 
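At its core the restored gate is the pair of predicates sketched below (illustrative condensation only, using simplified value types; the full implementations, including blinded-block handling, are in the `packages/state-transition/src/util/execution.ts` hunk further down in this patch).

```ts
// Condensed sketch only: simplified value types, not the exact Lodestar signatures.
import {bellatrix, ssz} from "@lodestar/types";

// Minimal shape of the state fields this sketch needs (the real code operates on BeaconStateBellatrix views).
type StateSketch = {latestExecutionPayloadHeader: bellatrix.ExecutionPayloadHeader};

// Merge is complete once the state's latestExecutionPayloadHeader is no longer the default (empty) header.
function isMergeTransitionCompleteSketch(state: StateSketch): boolean {
  return !ssz.bellatrix.ExecutionPayloadHeader.equals(
    state.latestExecutionPayloadHeader,
    ssz.bellatrix.ExecutionPayloadHeader.defaultValue()
  );
}

// Execution is enabled if the merge already completed, or if this block carries a non-default
// execution payload (i.e. it is the merge transition block itself).
function isExecutionEnabledSketch(state: StateSketch, body: bellatrix.BeaconBlockBody): boolean {
  return (
    isMergeTransitionCompleteSketch(state) ||
    !ssz.bellatrix.ExecutionPayload.equals(body.executionPayload, ssz.bellatrix.ExecutionPayload.defaultValue())
  );
}
```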
**Description** Restore code required to perform sync through bellatrix - re-added `isExecutionEnabled()` and `isMergeTransitionComplete()` checks during block processing - enabled some spec tests again that were previously skipped - mostly copied original code removed in [#8680](https://github.com/ChainSafe/lodestar/pull/8680) but cleaned up some comments and simplified a bit --- .../blocks/verifyBlocksExecutionPayloads.ts | 11 +++++- .../beacon-node/src/chain/forkChoice/index.ts | 5 ++- .../beacon-node/src/chain/validation/block.ts | 3 +- packages/beacon-node/src/node/notifier.ts | 20 ++++++---- .../test/spec/utils/specTestIterator.ts | 7 +--- .../fork-choice/src/forkChoice/forkChoice.ts | 3 +- packages/state-transition/src/block/index.ts | 8 +++- .../src/block/processExecutionPayload.ts | 22 +++++++---- .../state-transition/src/util/execution.ts | 39 +++++++++++++++++++ 9 files changed, 89 insertions(+), 29 deletions(-) diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts index a74e3f033d..2869cb12e7 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts @@ -8,7 +8,12 @@ import { ProtoBlock, } from "@lodestar/fork-choice"; import {ForkSeq} from "@lodestar/params"; -import {CachedBeaconStateAllForks, isExecutionBlockBodyType, isExecutionStateType} from "@lodestar/state-transition"; +import { + CachedBeaconStateAllForks, + isExecutionBlockBodyType, + isExecutionEnabled, + isExecutionStateType, +} from "@lodestar/state-transition"; import {bellatrix, electra} from "@lodestar/types"; import {ErrorAborted, Logger, toRootHex} from "@lodestar/utils"; import {ExecutionPayloadStatus, IExecutionEngine} from "../../execution/engine/interface.js"; @@ -145,7 +150,9 @@ export async function verifyBlockExecutionPayload( const block = blockInput.getBlock(); /** Not null if execution is enabled */ const executionPayloadEnabled = - isExecutionStateType(preState0) && isExecutionBlockBodyType(block.message.body) + isExecutionStateType(preState0) && + isExecutionBlockBodyType(block.message.body) && + isExecutionEnabled(preState0, block.message) ? block.message.body.executionPayload : null; diff --git a/packages/beacon-node/src/chain/forkChoice/index.ts b/packages/beacon-node/src/chain/forkChoice/index.ts index 56da740ee9..d83dc9949d 100644 --- a/packages/beacon-node/src/chain/forkChoice/index.ts +++ b/packages/beacon-node/src/chain/forkChoice/index.ts @@ -18,6 +18,7 @@ import { getBlockRootAtSlot, getEffectiveBalanceIncrementsZeroInactive, isExecutionStateType, + isMergeTransitionComplete, } from "@lodestar/state-transition"; import {Slot, ssz} from "@lodestar/types"; import {Logger, toRootHex} from "@lodestar/utils"; @@ -134,7 +135,7 @@ export function initializeForkChoiceFromFinalizedState( unrealizedFinalizedEpoch: finalizedCheckpoint.epoch, unrealizedFinalizedRoot: toRootHex(finalizedCheckpoint.root), - ...(isExecutionStateType(state) + ...(isExecutionStateType(state) && isMergeTransitionComplete(state) ? 
{ executionPayloadBlockHash: toRootHex(state.latestExecutionPayloadHeader.blockHash), executionPayloadNumber: state.latestExecutionPayloadHeader.blockNumber, @@ -215,7 +216,7 @@ export function initializeForkChoiceFromUnfinalizedState( unrealizedFinalizedEpoch: finalizedCheckpoint.epoch, unrealizedFinalizedRoot: toRootHex(finalizedCheckpoint.root), - ...(isExecutionStateType(unfinalizedState) + ...(isExecutionStateType(unfinalizedState) && isMergeTransitionComplete(unfinalizedState) ? { executionPayloadBlockHash: toRootHex(unfinalizedState.latestExecutionPayloadHeader.blockHash), executionPayloadNumber: unfinalizedState.latestExecutionPayloadHeader.blockNumber, diff --git a/packages/beacon-node/src/chain/validation/block.ts b/packages/beacon-node/src/chain/validation/block.ts index 44fa18457c..905096e79c 100644 --- a/packages/beacon-node/src/chain/validation/block.ts +++ b/packages/beacon-node/src/chain/validation/block.ts @@ -6,6 +6,7 @@ import { computeTimeAtSlot, getBlockProposerSignatureSet, isExecutionBlockBodyType, + isExecutionEnabled, isExecutionStateType, } from "@lodestar/state-transition"; import {SignedBeaconBlock, deneb} from "@lodestar/types"; @@ -139,7 +140,7 @@ export async function validateGossipBlock( if (fork === ForkName.bellatrix) { if (!isExecutionBlockBodyType(block.body)) throw Error("Not merge block type"); const executionPayload = block.body.executionPayload; - if (isExecutionStateType(blockState)) { + if (isExecutionStateType(blockState) && isExecutionEnabled(blockState, block)) { const expectedTimestamp = computeTimeAtSlot(config, blockSlot, chain.genesisTime); if (executionPayload.timestamp !== computeTimeAtSlot(config, blockSlot, chain.genesisTime)) { throw new BlockGossipError(GossipAction.REJECT, { diff --git a/packages/beacon-node/src/node/notifier.ts b/packages/beacon-node/src/node/notifier.ts index aabb1850a0..f40055ec4e 100644 --- a/packages/beacon-node/src/node/notifier.ts +++ b/packages/beacon-node/src/node/notifier.ts @@ -6,6 +6,7 @@ import { computeEpochAtSlot, computeStartSlotAtEpoch, isExecutionCachedStateType, + isMergeTransitionComplete, } from "@lodestar/state-transition"; import {Epoch} from "@lodestar/types"; import {ErrorAborted, Logger, prettyBytes, prettyBytesShort, sleep} from "@lodestar/utils"; @@ -171,13 +172,18 @@ function getHeadExecutionInfo( // Add execution status to notifier only if head is on/post bellatrix if (isExecutionCachedStateType(headState)) { - const executionPayloadHashInfo = - headInfo.executionStatus !== ExecutionStatus.PreMerge ? headInfo.executionPayloadBlockHash : "empty"; - const executionPayloadNumberInfo = - headInfo.executionStatus !== ExecutionStatus.PreMerge ? headInfo.executionPayloadNumber : NaN; - return [ - `exec-block: ${executionStatusStr}(${executionPayloadNumberInfo} ${prettyBytesShort(executionPayloadHashInfo)})`, - ]; + if (isMergeTransitionComplete(headState)) { + const executionPayloadHashInfo = + headInfo.executionStatus !== ExecutionStatus.PreMerge ? headInfo.executionPayloadBlockHash : "empty"; + const executionPayloadNumberInfo = + headInfo.executionStatus !== ExecutionStatus.PreMerge ? 
headInfo.executionPayloadNumber : NaN; + return [ + `exec-block: ${executionStatusStr}(${executionPayloadNumberInfo} ${prettyBytesShort( + executionPayloadHashInfo + )})`, + ]; + } + return [`exec-block: ${executionStatusStr}`]; } return []; diff --git a/packages/beacon-node/test/spec/utils/specTestIterator.ts b/packages/beacon-node/test/spec/utils/specTestIterator.ts index 428b6602d4..3286cc146f 100644 --- a/packages/beacon-node/test/spec/utils/specTestIterator.ts +++ b/packages/beacon-node/test/spec/utils/specTestIterator.ts @@ -75,12 +75,7 @@ export const defaultSkipOpts: SkipOpts = { /^gloas\/(finality|fork_choice|networking|sanity|transition)\/.*$/, /^gloas\/ssz_static\/ForkChoiceNode.*$/, ], - skippedTests: [ - // These tests validate "first payload" scenarios where is_execution_enabled was false pre-merge. - // Since we removed merge transition support, these code paths no longer exist. - /^bellatrix\/operations\/execution_payload\/.+\/bad_parent_hash_first_payload$/, - /^bellatrix\/sanity\/blocks\/.+\/is_execution_enabled_false$/, - ], + skippedTests: [], skippedRunners: [], }; diff --git a/packages/fork-choice/src/forkChoice/forkChoice.ts b/packages/fork-choice/src/forkChoice/forkChoice.ts index 02234e8f73..779583d3f9 100644 --- a/packages/fork-choice/src/forkChoice/forkChoice.ts +++ b/packages/fork-choice/src/forkChoice/forkChoice.ts @@ -10,6 +10,7 @@ import { computeStartSlotAtEpoch, getAttesterSlashableIndices, isExecutionBlockBodyType, + isExecutionEnabled, isExecutionStateType, } from "@lodestar/state-transition"; import {computeUnrealizedCheckpoints} from "@lodestar/state-transition/epoch"; @@ -741,7 +742,7 @@ export class ForkChoice implements IForkChoice { unrealizedFinalizedEpoch: unrealizedFinalizedCheckpoint.epoch, unrealizedFinalizedRoot: unrealizedFinalizedCheckpoint.rootHex, - ...(isExecutionBlockBodyType(block.body) && isExecutionStateType(state) + ...(isExecutionBlockBodyType(block.body) && isExecutionStateType(state) && isExecutionEnabled(state, block) ? { executionPayloadBlockHash: toRootHex(block.body.executionPayload.blockHash), executionPayloadNumber: block.body.executionPayload.blockNumber, diff --git a/packages/state-transition/src/block/index.ts b/packages/state-transition/src/block/index.ts index a5d67e4c75..c208c7964f 100644 --- a/packages/state-transition/src/block/index.ts +++ b/packages/state-transition/src/block/index.ts @@ -7,7 +7,7 @@ import { CachedBeaconStateCapella, CachedBeaconStateGloas, } from "../types.js"; -import {getFullOrBlindedPayload} from "../util/execution.js"; +import {getFullOrBlindedPayload, isExecutionEnabled} from "../util/execution.js"; import {BlockExternalData, DataAvailabilityStatus} from "./externalData.js"; import {processBlobKzgCommitments} from "./processBlobKzgCommitments.js"; import {processBlockHeader} from "./processBlockHeader.js"; @@ -67,7 +67,11 @@ export function processBlock( // The call to the process_execution_payload must happen before the call to the process_randao as the former depends // on the randao_mix computed with the reveal of the previous block. 
// TODO GLOAS: We call processExecutionPayload somewhere else post-gloas - if (fork >= ForkSeq.bellatrix && fork < ForkSeq.gloas) { + if ( + fork < ForkSeq.gloas && + fork >= ForkSeq.bellatrix && + isExecutionEnabled(state as CachedBeaconStateBellatrix, block) + ) { processExecutionPayload(fork, state as CachedBeaconStateBellatrix, block.body, externalData); } diff --git a/packages/state-transition/src/block/processExecutionPayload.ts b/packages/state-transition/src/block/processExecutionPayload.ts index 65f28822a0..0af784074c 100644 --- a/packages/state-transition/src/block/processExecutionPayload.ts +++ b/packages/state-transition/src/block/processExecutionPayload.ts @@ -3,7 +3,11 @@ import {ForkName, ForkSeq, isForkPostDeneb} from "@lodestar/params"; import {BeaconBlockBody, BlindedBeaconBlockBody, deneb, isExecutionPayload} from "@lodestar/types"; import {toHex, toRootHex} from "@lodestar/utils"; import {CachedBeaconStateBellatrix, CachedBeaconStateCapella} from "../types.js"; -import {executionPayloadToPayloadHeader, getFullOrBlindedPayloadFromBody} from "../util/execution.js"; +import { + executionPayloadToPayloadHeader, + getFullOrBlindedPayloadFromBody, + isMergeTransitionComplete, +} from "../util/execution.js"; import {computeEpochAtSlot, computeTimeAtSlot, getRandaoMix} from "../util/index.js"; import {BlockExternalData, ExecutionPayloadStatus} from "./externalData.js"; @@ -17,13 +21,15 @@ export function processExecutionPayload( const forkName = ForkName[ForkSeq[fork] as ForkName]; // Verify consistency of the parent hash, block number, base fee per gas and gas limit // with respect to the previous execution payload header - const {latestExecutionPayloadHeader} = state; - if (!byteArrayEquals(payload.parentHash, latestExecutionPayloadHeader.blockHash)) { - throw Error( - `Invalid execution payload parentHash ${toRootHex(payload.parentHash)} latest blockHash ${toRootHex( - latestExecutionPayloadHeader.blockHash - )}` - ); + if (isMergeTransitionComplete(state)) { + const {latestExecutionPayloadHeader} = state; + if (!byteArrayEquals(payload.parentHash, latestExecutionPayloadHeader.blockHash)) { + throw Error( + `Invalid execution payload parentHash ${toRootHex(payload.parentHash)} latest blockHash ${toRootHex( + latestExecutionPayloadHeader.blockHash + )}` + ); + } } // Verify random diff --git a/packages/state-transition/src/util/execution.ts b/packages/state-transition/src/util/execution.ts index a64f21a086..0f5b450f5c 100644 --- a/packages/state-transition/src/util/execution.ts +++ b/packages/state-transition/src/util/execution.ts @@ -2,6 +2,7 @@ import {ForkName, ForkPostBellatrix, ForkPreGloas, ForkSeq} from "@lodestar/para import { BeaconBlock, BeaconBlockBody, + BlindedBeaconBlock, BlindedBeaconBlockBody, ExecutionPayload, ExecutionPayloadHeader, @@ -9,16 +10,54 @@ import { capella, deneb, isBlindedBeaconBlockBody, + isExecutionPayload, ssz, } from "@lodestar/types"; import { BeaconStateAllForks, + BeaconStateBellatrix, BeaconStateCapella, BeaconStateExecutions, CachedBeaconStateAllForks, CachedBeaconStateExecutions, } from "../types.js"; +/** + * Execution enabled = merge is done. + * When (A) state has execution data OR (B) block has execution data + */ +export function isExecutionEnabled(state: BeaconStateExecutions, block: BeaconBlock | BlindedBeaconBlock): boolean { + if (isMergeTransitionComplete(state)) { + return true; + } + + // Throws if not post-bellatrix block. 
A fork-guard before isExecutionEnabled() prevents this from happening + const payload = getFullOrBlindedPayload(block); + + return isExecutionPayload(payload) + ? !ssz.bellatrix.ExecutionPayload.equals(payload, ssz.bellatrix.ExecutionPayload.defaultValue()) + : !ssz.bellatrix.ExecutionPayloadHeader.equals( + state.latestExecutionPayloadHeader, + ssz.bellatrix.ExecutionPayloadHeader.defaultValue() + ); +} + +/** + * Merge is complete when the state includes execution layer data: + * state.latestExecutionPayloadHeader NOT EMPTY or state is post-capella + */ +export function isMergeTransitionComplete(state: BeaconStateExecutions): boolean { + if (isCapellaStateType(state)) { + // All networks have completed the merge transition before capella + return true; + } + + return !ssz.bellatrix.ExecutionPayloadHeader.equals( + (state as BeaconStateBellatrix).latestExecutionPayloadHeader, + ssz.bellatrix.ExecutionPayloadHeader.defaultValue() + ); +} + /** Type guard for bellatrix.BeaconState */ export function isExecutionStateType(state: BeaconStateAllForks): state is BeaconStateExecutions { return (state as BeaconStateExecutions).latestExecutionPayloadHeader !== undefined; From 493cc12d2fb4ab8dc8d2a36d2eed910b9e8d053d Mon Sep 17 00:00:00 2001 From: Nico Flaig Date: Fri, 19 Dec 2025 16:43:28 +0100 Subject: [PATCH 13/20] feat: update vc to submit beacon committee selections once per epoch (#8669) **Motivation** Closes https://github.com/ChainSafe/lodestar/issues/8606 **Description** This updates our implementation to be compliant with latest spec https://github.com/ethereum/beacon-APIs/pull/368. For sync committee aggregation selection (unchanged) - we call `submitSyncCommitteeSelections` at the start of the slot - the timeout is still based on `CONTRIBUTION_DUE_BPS` into the slot (8 seconds) - we call the endpoint for all duties of this slot - logic has been moved to duties service For attestation aggregation selection - we call `submitBeaconCommitteeSelections` at the start of the epoch for current and next epoch (2 separate calls) - the timeout uses default which is based on `SLOT_DURATION_MS` (12 seconds) - we only call `prepareBeaconCommitteeSubnet` once the above call either resolved or failed, this should be fine as it's not that time sensitive (one epoch lookahead) - if duties are reorged, we will call `submitBeaconCommitteeSelections` with duties of affected epoch - logic has been moved to duties service Previous PR https://github.com/ChainSafe/lodestar/pull/5344 --- packages/api/src/beacon/routes/validator.ts | 7 ++ .../validator/src/services/attestation.ts | 103 +----------------- .../src/services/attestationDuties.ts | 64 ++++++++++- .../validator/src/services/syncCommittee.ts | 95 +--------------- .../src/services/syncCommitteeDuties.ts | 84 +++++++++++++- .../test/unit/services/attestation.test.ts | 37 +------ .../unit/services/attestationDuties.test.ts | 81 ++++++++++++++ .../unit/services/syncCommitteDuties.test.ts | 73 +++++++++++++ .../test/unit/services/syncCommittee.test.ts | 31 +----- 9 files changed, 312 insertions(+), 263 deletions(-) diff --git a/packages/api/src/beacon/routes/validator.ts b/packages/api/src/beacon/routes/validator.ts index e56675ddfd..0d2c0fa2a0 100644 --- a/packages/api/src/beacon/routes/validator.ts +++ b/packages/api/src/beacon/routes/validator.ts @@ -498,6 +498,10 @@ export type Endpoints = { * a validator client to correctly determine if one of its validators has been selected to * perform an aggregation duty in this slot. 
* + * Validator clients running in a distributed validator cluster must query this endpoint + * at the start of an epoch for the current and lookahead (next) epochs for all validators + * that have attester duties in the current and lookahead epochs. + * * Note that this endpoint is not implemented by the beacon node and will return a 501 error * * Returns an array of threshold aggregated beacon committee selection proofs @@ -521,6 +525,9 @@ export type Endpoints = { * a validator client to correctly determine if one of its validators has been selected to * perform a sync committee contribution (sync aggregation) duty in this slot. * + * Validator clients running in a distributed validator cluster must query this endpoint + * at the start of each slot for all validators that are included in the current sync committee. + * * Note that this endpoint is not implemented by the beacon node and will return a 501 error * * Returns an array of threshold aggregated sync committee selection proofs diff --git a/packages/validator/src/services/attestation.ts b/packages/validator/src/services/attestation.ts index 8043c16158..2dbb2b847e 100644 --- a/packages/validator/src/services/attestation.ts +++ b/packages/validator/src/services/attestation.ts @@ -1,8 +1,8 @@ -import {ApiClient, routes} from "@lodestar/api"; +import {ApiClient} from "@lodestar/api"; import {ChainForkConfig} from "@lodestar/config"; import {ForkName, isForkPostElectra} from "@lodestar/params"; -import {computeEpochAtSlot, isAggregatorFromCommitteeLength} from "@lodestar/state-transition"; -import {BLSSignature, SignedAggregateAndProof, SingleAttestation, Slot, phase0, ssz} from "@lodestar/types"; +import {computeEpochAtSlot} from "@lodestar/state-transition"; +import {SignedAggregateAndProof, SingleAttestation, Slot, phase0, ssz} from "@lodestar/types"; import {prettyBytes, sleep, toRootHex} from "@lodestar/utils"; import {Metrics} from "../metrics.js"; import {PubkeyHex} from "../types.js"; @@ -75,18 +75,6 @@ export class AttestationService { } const fork = this.config.getForkName(slot); - if (this.opts?.distributedAggregationSelection) { - // Validator in distributed cluster only has a key share, not the full private key. - // The partial selection proofs must be exchanged for combined selection proofs by - // calling submitBeaconCommitteeSelections on the distributed validator middleware client. - // This will run in parallel to other attestation tasks but must be finished before starting - // attestation aggregation as it is required to correctly determine if validator is aggregator - // and to produce a AggregateAndProof that can be threshold aggregated by the middleware client. - this.runDistributedAggregationSelectionTasks(fork, duties, slot, signal).catch((e) => - this.logger.error("Error on attestation aggregation selection", {slot}, e) - ); - } - // A validator should create and broadcast the attestation to the associated attestation subnet when either // (a) the validator has received a valid block from the expected block proposer for the assigned slot or // (b) ATTESTATION_DUE_BPS of the slot has transpired -- whichever comes first. @@ -274,89 +262,4 @@ export class AttestationService { } } } - - /** - * Performs additional attestation aggregation tasks required if validator is part of distributed cluster - * - * 1. Exchange partial for combined selection proofs - * 2. Determine validators that should aggregate attestations - * 3. Mutate duty objects to set selection proofs for aggregators - * 4. 
Resubscribe validators as aggregators on beacon committee subnets - * - * See https://docs.google.com/document/d/1q9jOTPcYQa-3L8luRvQJ-M0eegtba4Nmon3dpO79TMk/mobilebasic - */ - private async runDistributedAggregationSelectionTasks( - fork: ForkName, - duties: AttDutyAndProof[], - slot: number, - signal: AbortSignal - ): Promise { - const partialSelections: routes.validator.BeaconCommitteeSelection[] = duties.map( - ({duty, partialSelectionProof}) => ({ - validatorIndex: duty.validatorIndex, - slot, - selectionProof: partialSelectionProof as BLSSignature, - }) - ); - - this.logger.debug("Submitting partial beacon committee selection proofs", {slot, count: partialSelections.length}); - - const res = await Promise.race([ - this.api.validator.submitBeaconCommitteeSelections({selections: partialSelections}), - // Exit attestation aggregation flow if there is no response after ATTESTATION_DUE_BPS of the slot as - // beacon node would likely not have enough time to prepare an aggregate attestation. - // Note that the aggregations flow is not explicitly exited but rather will be skipped - // due to the fact that calculation of `is_aggregator` in AttestationDutiesService is not done - // and selectionProof is set to null, meaning no validator will be considered an aggregator. - sleep(this.config.getAttestationDueMs(fork) - this.clock.msFromSlot(slot), signal), - ]); - - if (!res) { - throw new Error("Failed to receive combined selection proofs before ATTESTATION_DUE_BPS of the slot"); - } - - const combinedSelections = res.value(); - this.logger.debug("Received combined beacon committee selection proofs", {slot, count: combinedSelections.length}); - - const beaconCommitteeSubscriptions: routes.validator.BeaconCommitteeSubscription[] = []; - - for (const dutyAndProof of duties) { - const {validatorIndex, committeeIndex, committeeLength, committeesAtSlot} = dutyAndProof.duty; - const logCtxValidator = {slot, index: committeeIndex, validatorIndex}; - - const combinedSelection = combinedSelections.find((s) => s.validatorIndex === validatorIndex && s.slot === slot); - - if (!combinedSelection) { - this.logger.warn("Did not receive combined beacon committee selection proof", logCtxValidator); - continue; - } - - const isAggregator = isAggregatorFromCommitteeLength(committeeLength, combinedSelection.selectionProof); - - if (isAggregator) { - // Update selection proof by mutating duty object - dutyAndProof.selectionProof = combinedSelection.selectionProof; - - // Only push subnet subscriptions with `isAggregator=true` as all validators - // with duties for slot are already subscribed to subnets with `isAggregator=false`. - beaconCommitteeSubscriptions.push({ - validatorIndex, - committeesAtSlot, - committeeIndex, - slot, - isAggregator, - }); - this.logger.debug("Resubscribing validator as aggregator on beacon committee subnet", logCtxValidator); - } - } - - // If there are any subscriptions with aggregators, push them out to the beacon node. 
- if (beaconCommitteeSubscriptions.length > 0) { - (await this.api.validator.prepareBeaconCommitteeSubnet({subscriptions: beaconCommitteeSubscriptions})).assertOk(); - this.logger.debug("Resubscribed validators as aggregators on beacon committee subnets", { - slot, - count: beaconCommitteeSubscriptions.length, - }); - } - } } diff --git a/packages/validator/src/services/attestationDuties.ts b/packages/validator/src/services/attestationDuties.ts index 3464f9d84c..eed42e8a5a 100644 --- a/packages/validator/src/services/attestationDuties.ts +++ b/packages/validator/src/services/attestationDuties.ts @@ -204,6 +204,17 @@ export class AttestationDutiesService { for (const epoch of [currentEpoch, nextEpoch]) { const epochDuties = this.dutiesByIndexByEpoch.get(epoch)?.dutiesByIndex; if (epochDuties) { + if (this.opts?.distributedAggregationSelection) { + // Validator in distributed cluster only has a key share, not the full private key. + // The partial selection proofs must be exchanged for combined selection proofs by + // calling submitBeaconCommitteeSelections on the distributed validator middleware client. + // This is required to correctly determine if validator is aggregator and to produce + // a AggregateAndProof that can be threshold aggregated by the middleware client. + await this.runDistributedAggregationSelectionTasks(Array.from(epochDuties.values()), epoch).catch((e) => + this.logger.error("Error on attestation aggregation selection", {epoch}, e) + ); + } + for (const {duty, selectionProof} of epochDuties.values()) { if (indexSet.has(duty.validatorIndex)) { beaconCommitteeSubscriptions.push({ @@ -367,6 +378,12 @@ export class AttestationDutiesService { const epochDuties = this.dutiesByIndexByEpoch.get(dutyEpoch)?.dutiesByIndex; if (epochDuties) { + if (this.opts?.distributedAggregationSelection) { + await this.runDistributedAggregationSelectionTasks(Array.from(epochDuties.values()), dutyEpoch).catch((e) => + this.logger.error("Error on attestation aggregation selection after duties reorg", logContext, e) + ); + } + for (const {duty, selectionProof} of epochDuties.values()) { beaconCommitteeSubscriptions.push({ validatorIndex: duty.validatorIndex, @@ -403,8 +420,8 @@ export class AttestationDutiesService { if (this.opts?.distributedAggregationSelection) { // Validator in distributed cluster only has a key share, not the full private key. // Passing a partial selection proof to `is_aggregator` would produce incorrect result. - // AttestationService will exchange partial for combined selection proofs retrieved from - // distributed validator middleware client and determine aggregators at beginning of every slot. + // Before subscribing to beacon committee subnets, aggregators are determined by exchanging + // partial for combined selection proofs retrieved from distributed validator middleware client. return {duty, selectionProof: null, partialSelectionProof: selectionProof}; } @@ -427,4 +444,47 @@ export class AttestationDutiesService { } } } + + /** + * Performs additional attestation aggregation tasks required if validator is part of distributed cluster + * + * 1. Exchange partial for combined selection proofs + * 2. Determine validators that should aggregate attestations + * 3. 
Mutate duty objects to set selection proofs for aggregators + */ + private async runDistributedAggregationSelectionTasks(duties: AttDutyAndProof[], epoch: Epoch): Promise { + const partialSelections: routes.validator.BeaconCommitteeSelection[] = duties.map( + ({duty, partialSelectionProof}) => ({ + validatorIndex: duty.validatorIndex, + slot: duty.slot, + selectionProof: partialSelectionProof as BLSSignature, + }) + ); + + this.logger.debug("Submitting partial beacon committee selection proofs", {epoch, count: partialSelections.length}); + + const res = await this.api.validator.submitBeaconCommitteeSelections({selections: partialSelections}); + + const combinedSelections = res.value(); + this.logger.debug("Received combined beacon committee selection proofs", {epoch, count: combinedSelections.length}); + + for (const dutyAndProof of duties) { + const {slot, validatorIndex, committeeIndex, committeeLength} = dutyAndProof.duty; + const logCtxValidator = {slot, index: committeeIndex, validatorIndex}; + + const combinedSelection = combinedSelections.find((s) => s.validatorIndex === validatorIndex && s.slot === slot); + + if (!combinedSelection) { + this.logger.warn("Did not receive combined beacon committee selection proof", logCtxValidator); + continue; + } + + const isAggregator = isAggregatorFromCommitteeLength(committeeLength, combinedSelection.selectionProof); + + if (isAggregator) { + // Update selection proof by mutating duty object + dutyAndProof.selectionProof = combinedSelection.selectionProof; + } + } + } } diff --git a/packages/validator/src/services/syncCommittee.ts b/packages/validator/src/services/syncCommittee.ts index 9edaa188c0..6f55d6be8e 100644 --- a/packages/validator/src/services/syncCommittee.ts +++ b/packages/validator/src/services/syncCommittee.ts @@ -1,8 +1,7 @@ -import {ApiClient, routes} from "@lodestar/api"; +import {ApiClient} from "@lodestar/api"; import {ChainForkConfig} from "@lodestar/config"; import {ForkName, isForkPostAltair} from "@lodestar/params"; -import {isSyncCommitteeAggregator} from "@lodestar/state-transition"; -import {BLSSignature, CommitteeIndex, Root, Slot, altair} from "@lodestar/types"; +import {CommitteeIndex, Root, Slot, altair} from "@lodestar/types"; import {sleep} from "@lodestar/utils"; import {Metrics} from "../metrics.js"; import {PubkeyHex} from "../types.js"; @@ -73,18 +72,6 @@ export class SyncCommitteeService { return; } - if (this.opts?.distributedAggregationSelection) { - // Validator in distributed cluster only has a key share, not the full private key. - // The partial selection proofs must be exchanged for combined selection proofs by - // calling submitSyncCommitteeSelections on the distributed validator middleware client. - // This will run in parallel to other sync committee tasks but must be finished before starting - // sync committee contributions as it is required to correctly determine if validator is aggregator - // and to produce a ContributionAndProof that can be threshold aggregated by the middleware client. 
- this.runDistributedAggregationSelectionTasks(fork, dutiesAtSlot, slot, signal).catch((e) => - this.logger.error("Error on sync committee aggregation selection", {slot}, e) - ); - } - // unlike Attestation, SyncCommitteeSignature could be published asap // especially with lodestar, it's very busy at ATTESTATION_DUE_BPS of the slot // see https://github.com/ChainSafe/lodestar/issues/4608 @@ -257,82 +244,4 @@ export class SyncCommitteeService { } } } - - /** - * Performs additional sync committee contribution tasks required if validator is part of distributed cluster - * - * 1. Exchange partial for combined selection proofs - * 2. Determine validators that should produce sync committee contribution - * 3. Mutate duty objects to set selection proofs for aggregators - * - * See https://docs.google.com/document/d/1q9jOTPcYQa-3L8luRvQJ-M0eegtba4Nmon3dpO79TMk/mobilebasic - */ - private async runDistributedAggregationSelectionTasks( - fork: ForkName, - duties: SyncDutyAndProofs[], - slot: number, - signal: AbortSignal - ): Promise { - const partialSelections: routes.validator.SyncCommitteeSelection[] = []; - - for (const {duty, selectionProofs} of duties) { - const validatorSelections: routes.validator.SyncCommitteeSelection[] = selectionProofs.map( - ({subcommitteeIndex, partialSelectionProof}) => ({ - validatorIndex: duty.validatorIndex, - slot, - subcommitteeIndex, - selectionProof: partialSelectionProof as BLSSignature, - }) - ); - partialSelections.push(...validatorSelections); - } - - this.logger.debug("Submitting partial sync committee selection proofs", {slot, count: partialSelections.length}); - - const res = await Promise.race([ - this.api.validator.submitSyncCommitteeSelections({selections: partialSelections}), - // Exit sync committee contributions flow if there is no response after CONTRIBUTION_DUE_BPS of the slot. - // This is in contrast to attestations aggregations flow which is already exited at ATTESTATION_DUE_BPS of the slot - // because for sync committee is not required to resubscribe to subnets as beacon node will assume - // validator always aggregates. This allows us to wait until we have to produce sync committee contributions. - // Note that the sync committee contributions flow is not explicitly exited but rather will be skipped - // due to the fact that calculation of `is_sync_committee_aggregator` in SyncCommitteeDutiesService is not done - // and selectionProof is set to null, meaning no validator will be considered an aggregator. 
- sleep(this.config.getSyncContributionDueMs(fork) - this.clock.msFromSlot(slot), signal), - ]); - - if (!res) { - throw new Error("Failed to receive combined selection proofs before CONTRIBUTION_DUE_BPS of the slot"); - } - - const combinedSelections = res.value(); - this.logger.debug("Received combined sync committee selection proofs", {slot, count: combinedSelections.length}); - - for (const dutyAndProofs of duties) { - const {validatorIndex, subnets} = dutyAndProofs.duty; - - for (const subnet of subnets) { - const logCtxValidator = {slot, index: subnet, validatorIndex}; - - const combinedSelection = combinedSelections.find( - (s) => s.validatorIndex === validatorIndex && s.slot === slot && s.subcommitteeIndex === subnet - ); - - if (!combinedSelection) { - this.logger.warn("Did not receive combined sync committee selection proof", logCtxValidator); - continue; - } - - const isAggregator = isSyncCommitteeAggregator(combinedSelection.selectionProof); - - if (isAggregator) { - const selectionProofObject = dutyAndProofs.selectionProofs.find((p) => p.subcommitteeIndex === subnet); - if (selectionProofObject) { - // Update selection proof by mutating proof objects in duty object - selectionProofObject.selectionProof = combinedSelection.selectionProof; - } - } - } - } - } } diff --git a/packages/validator/src/services/syncCommitteeDuties.ts b/packages/validator/src/services/syncCommitteeDuties.ts index 218169e1dc..d8494c81a9 100644 --- a/packages/validator/src/services/syncCommitteeDuties.ts +++ b/packages/validator/src/services/syncCommitteeDuties.ts @@ -85,7 +85,7 @@ export class SyncCommitteeDutiesService { private readonly config: ChainForkConfig, private readonly logger: LoggerVc, private readonly api: ApiClient, - clock: IClock, + private readonly clock: IClock, private readonly validatorStore: ValidatorStore, syncingStatusTracker: SyncingStatusTracker, metrics: Metrics | null, @@ -134,6 +134,18 @@ export class SyncCommitteeDutiesService { selectionProofs: await this.getSelectionProofs(slot, dutyAtPeriod.duty), }); } + + if (this.opts?.distributedAggregationSelection) { + // Validator in distributed cluster only has a key share, not the full private key. + // The partial selection proofs must be exchanged for combined selection proofs by + // calling submitSyncCommitteeSelections on the distributed validator middleware client. + // This will run in parallel to other sync committee tasks but must be finished before starting + // sync committee contributions as it is required to correctly determine if validator is aggregator + // and to produce a ContributionAndProof that can be threshold aggregated by the middleware client. + this.runDistributedAggregationSelectionTasks(duties, slot).catch((e) => + this.logger.error("Error on sync committee aggregation selection", {slot}, e) + ); + } } return duties; @@ -307,8 +319,8 @@ export class SyncCommitteeDutiesService { if (this.opts?.distributedAggregationSelection) { // Validator in distributed cluster only has a key share, not the full private key. // Passing a partial selection proof to `is_sync_committee_aggregator` would produce incorrect result. - // SyncCommitteeService will exchange partial for combined selection proofs retrieved from - // distributed validator middleware client and determine aggregators at beginning of every slot. + // For all duties in the slot, aggregators are determined by exchanging partial for combined selection + // proofs retrieved from distributed validator middleware client at beginning of every slot. 
dutiesAndProofs.push({ selectionProof: null, partialSelectionProof: selectionProof, @@ -334,4 +346,70 @@ export class SyncCommitteeDutiesService { } } } + + /** + * Performs additional sync committee contribution tasks required if validator is part of distributed cluster + * + * 1. Exchange partial for combined selection proofs + * 2. Determine validators that should produce sync committee contribution + * 3. Mutate duty objects to set selection proofs for aggregators + */ + private async runDistributedAggregationSelectionTasks(duties: SyncDutyAndProofs[], slot: number): Promise { + const partialSelections: routes.validator.SyncCommitteeSelection[] = []; + + for (const {duty, selectionProofs} of duties) { + const validatorSelections: routes.validator.SyncCommitteeSelection[] = selectionProofs.map( + ({subcommitteeIndex, partialSelectionProof}) => ({ + validatorIndex: duty.validatorIndex, + slot, + subcommitteeIndex, + selectionProof: partialSelectionProof as BLSSignature, + }) + ); + partialSelections.push(...validatorSelections); + } + + this.logger.debug("Submitting partial sync committee selection proofs", {slot, count: partialSelections.length}); + + const res = await this.api.validator.submitSyncCommitteeSelections( + {selections: partialSelections}, + { + // Exit sync committee contributions flow if there is no response until CONTRIBUTION_DUE_BPS of the slot. + // Note that the sync committee contributions flow is not explicitly exited but rather will be skipped + // due to the fact that calculation of `is_sync_committee_aggregator` in SyncCommitteeDutiesService is not done + // and selectionProof is set to null, meaning no validator will be considered an aggregator. + timeoutMs: this.config.getSyncContributionDueMs(this.config.getForkName(slot)) - this.clock.msFromSlot(slot), + } + ); + + const combinedSelections = res.value(); + this.logger.debug("Received combined sync committee selection proofs", {slot, count: combinedSelections.length}); + + for (const dutyAndProofs of duties) { + const {validatorIndex, subnets} = dutyAndProofs.duty; + + for (const subnet of subnets) { + const logCtxValidator = {slot, index: subnet, validatorIndex}; + + const combinedSelection = combinedSelections.find( + (s) => s.validatorIndex === validatorIndex && s.slot === slot && s.subcommitteeIndex === subnet + ); + + if (!combinedSelection) { + this.logger.warn("Did not receive combined sync committee selection proof", logCtxValidator); + continue; + } + + const isAggregator = isSyncCommitteeAggregator(combinedSelection.selectionProof); + + if (isAggregator) { + const selectionProofObject = dutyAndProofs.selectionProofs.find((p) => p.subcommitteeIndex === subnet); + if (selectionProofObject) { + // Update selection proof by mutating proof objects in duty object + selectionProofObject.selectionProof = combinedSelection.selectionProof; + } + } + } + } + } } diff --git a/packages/validator/test/unit/services/attestation.test.ts b/packages/validator/test/unit/services/attestation.test.ts index 760ffeddf0..f86e93b1f2 100644 --- a/packages/validator/test/unit/services/attestation.test.ts +++ b/packages/validator/test/unit/services/attestation.test.ts @@ -1,7 +1,6 @@ import {afterEach, beforeEach, describe, expect, it, vi} from "vitest"; import {SecretKey} from "@chainsafe/blst"; import {toHexString} from "@chainsafe/ssz"; -import {routes} from "@lodestar/api"; import {ChainConfig, createChainForkConfig} from "@lodestar/config"; import {config as defaultConfig} from "@lodestar/config/default"; import {ForkName} 
from "@lodestar/params"; @@ -63,7 +62,6 @@ describe("AttestationService", () => { const testContexts: [string, AttestationServiceOpts, Partial][] = [ ["With default configuration", {}, {}], ["With default configuration post-electra", {}, electraConfig], - ["With distributed aggregation selection enabled", {distributedAggregationSelection: true}, {}], ]; for (const [title, opts, chainConfig] of testContexts) { @@ -105,8 +103,7 @@ describe("AttestationService", () => { validatorIndex: 0, pubkey: pubkeys[0], }, - selectionProof: opts.distributedAggregationSelection ? null : ZERO_HASH, - partialSelectionProof: opts.distributedAggregationSelection ? ZERO_HASH : undefined, + selectionProof: ZERO_HASH, }, ]; @@ -129,16 +126,6 @@ describe("AttestationService", () => { api.beacon.submitPoolAttestationsV2.mockResolvedValue(mockApiResponse({})); api.validator.publishAggregateAndProofsV2.mockResolvedValue(mockApiResponse({})); - if (opts.distributedAggregationSelection) { - // Mock distributed validator middleware client selections endpoint - // and return a selection proof that passes `is_aggregator` test - api.validator.submitBeaconCommitteeSelections.mockResolvedValue( - mockApiResponse({data: [{validatorIndex: 0, slot: 0, selectionProof: Buffer.alloc(1, 0x10)}]}) - ); - // Accept all subscriptions - api.validator.prepareBeaconCommitteeSubnet.mockResolvedValue(mockApiResponse({})); - } - // Mock signing service validatorStore.signAttestation.mockResolvedValue(singleAttestation); validatorStore.signAggregateAndProof.mockResolvedValue(aggregateAndProof); @@ -146,28 +133,6 @@ describe("AttestationService", () => { // Trigger clock onSlot for slot 0 await clock.tickSlotFns(0, controller.signal); - if (opts.distributedAggregationSelection) { - // Must submit partial beacon committee selection proof based on duty - const selection: routes.validator.BeaconCommitteeSelection = { - validatorIndex: 0, - slot: 0, - selectionProof: ZERO_HASH, - }; - expect(api.validator.submitBeaconCommitteeSelections).toHaveBeenCalledOnce(); - expect(api.validator.submitBeaconCommitteeSelections).toHaveBeenCalledWith({selections: [selection]}); - - // Must resubscribe validator as aggregator on beacon committee subnet - const subscription: routes.validator.BeaconCommitteeSubscription = { - validatorIndex: 0, - committeeIndex: 0, - committeesAtSlot: 120, - slot: 0, - isAggregator: true, - }; - expect(api.validator.prepareBeaconCommitteeSubnet).toHaveBeenCalledOnce(); - expect(api.validator.prepareBeaconCommitteeSubnet).toHaveBeenCalledWith({subscriptions: [subscription]}); - } - // Must submit the attestation received through produceAttestationData() expect(api.beacon.submitPoolAttestationsV2).toHaveBeenCalledOnce(); expect(api.beacon.submitPoolAttestationsV2).toHaveBeenCalledWith({signedAttestations: [singleAttestation]}); diff --git a/packages/validator/test/unit/services/attestationDuties.test.ts b/packages/validator/test/unit/services/attestationDuties.test.ts index ecac0dc0f0..26dd002819 100644 --- a/packages/validator/test/unit/services/attestationDuties.test.ts +++ b/packages/validator/test/unit/services/attestationDuties.test.ts @@ -244,6 +244,87 @@ describe("AttestationDutiesService", () => { expect(api.validator.prepareBeaconCommitteeSubnet).toHaveBeenCalledOnce(); }); + it("Should fetch duties with distributed aggregation selection", async () => { + // Reply with some duties + const slot = 1; + const epoch = computeEpochAtSlot(slot); + const duty: routes.validator.AttesterDuty = { + slot: slot, + committeeIndex: 1, 
+ committeeLength: 120, + committeesAtSlot: 120, + validatorCommitteeIndex: 1, + validatorIndex: index, + pubkey: pubkeys[0], + }; + api.validator.getAttesterDuties.mockResolvedValue( + mockApiResponse({data: [duty], meta: {dependentRoot: ZERO_HASH_HEX, executionOptimistic: false}}) + ); + + // Accept all subscriptions + api.validator.prepareBeaconCommitteeSubnet.mockResolvedValue(mockApiResponse({})); + + // Mock distributed validator middleware client selections endpoint + // and return a selection proof that passes `is_aggregator` test + const aggregatorSelectionProof = Buffer.alloc(1, 0x10); + api.validator.submitBeaconCommitteeSelections.mockResolvedValue( + mockApiResponse({data: [{validatorIndex: index, slot, selectionProof: aggregatorSelectionProof}]}) + ); + + // Clock will call runDutiesTasks() immediately + const clock = new ClockMock(); + const syncingStatusTracker = new SyncingStatusTracker(loggerVc, api, clock, null); + const dutiesService = new AttestationDutiesService( + loggerVc, + api, + clock, + validatorStore, + chainHeadTracker, + syncingStatusTracker, + null, + {distributedAggregationSelection: true} + ); + + // Trigger clock onSlot for slot 0 + await clock.tickEpochFns(0, controller.signal); + + // Validator index should be persisted + expect(validatorStore.getAllLocalIndices()).toEqual([index]); + expect(validatorStore.getPubkeyOfIndex(index)).toBe(toHexString(pubkeys[0])); + + // Must submit partial beacon committee selection proofs for current and next epoch + expect(api.validator.submitBeaconCommitteeSelections).toHaveBeenCalledTimes(2); + expect(api.validator.submitBeaconCommitteeSelections).toHaveBeenCalledWith({ + selections: [ + expect.objectContaining({ + validatorIndex: index, + slot, + }), + ], + }); + + // Duties for current epoch should be persisted with selection proof set for aggregator + const dutiesAtEpoch = dutiesService["dutiesByIndexByEpoch"].get(epoch); + expect(dutiesAtEpoch).toBeDefined(); + const dutyAndProof = dutiesAtEpoch?.dutiesByIndex.get(index); + expect(dutyAndProof).toBeDefined(); + expect(dutyAndProof?.duty).toEqual(duty); + // Selection proof should be set since the mocked proof passes `is_aggregator` + expect(dutyAndProof?.selectionProof).toEqual(aggregatorSelectionProof); + + // Must subscribe validator as aggregator on beacon committee subnet + expect(api.validator.prepareBeaconCommitteeSubnet).toHaveBeenCalledOnce(); + expect(api.validator.prepareBeaconCommitteeSubnet).toHaveBeenCalledWith({ + subscriptions: expect.arrayContaining([ + expect.objectContaining({ + validatorIndex: index, + slot, + isAggregator: true, + }), + ]), + }); + }); + describe("Reorg handling", () => { const oldDependentRoot = toRootHex(Buffer.alloc(32, 1)); const newDependentRoot = toRootHex(Buffer.alloc(32, 2)); diff --git a/packages/validator/test/unit/services/syncCommitteDuties.test.ts b/packages/validator/test/unit/services/syncCommitteDuties.test.ts index 6f5215f7de..a824f1d7d1 100644 --- a/packages/validator/test/unit/services/syncCommitteDuties.test.ts +++ b/packages/validator/test/unit/services/syncCommitteDuties.test.ts @@ -340,6 +340,79 @@ describe("SyncCommitteeDutiesService", () => { expect(api.validator.prepareSyncCommitteeSubnets).toHaveBeenCalledOnce(); }); + + it("Should fetch duties with distributed aggregation selection", async () => { + // Reply with some duties + const slot = 1; + const duty: routes.validator.SyncDuty = { + pubkey: pubkeys[0], + validatorIndex: indices[0], + validatorSyncCommitteeIndices: [7], + }; + 
api.validator.getSyncCommitteeDuties.mockResolvedValue( + mockApiResponse({data: [duty], meta: {executionOptimistic: false}}) + ); + + // Accept all subscriptions + api.validator.prepareSyncCommitteeSubnets.mockResolvedValue(mockApiResponse({})); + + // Mock distributed validator middleware client selections endpoint + // and return a selection proof that passes `is_sync_committee_aggregator` test + const aggregatorSelectionProof = Buffer.alloc(1, 0x19); + api.validator.submitSyncCommitteeSelections.mockResolvedValue( + mockApiResponse({ + data: [{validatorIndex: indices[0], slot, subcommitteeIndex: 0, selectionProof: aggregatorSelectionProof}], + }) + ); + + // Clock will call runDutiesTasks() immediately + const clock = new ClockMock(); + const syncingStatusTracker = new SyncingStatusTracker(loggerVc, api, clock, null); + const dutiesService = new SyncCommitteeDutiesService( + altair0Config, + loggerVc, + api, + clock, + validatorStore, + syncingStatusTracker, + null, + {distributedAggregationSelection: true} + ); + + // Trigger clock onSlot for slot 0 to fetch duties + await clock.tickEpochFns(0, controller.signal); + + // Validator index should be persisted + expect(validatorStore.getAllLocalIndices()).toEqual(indices); + + // Get duties for the slot + const duties = await dutiesService.getDutiesAtSlot(slot); + + // Verify duties are returned with partial selection proofs + expect(duties.length).toBe(1); + expect(duties[0].duty.validatorIndex).toBe(indices[0]); + expect(duties[0].selectionProofs[0].partialSelectionProof).toBeDefined(); + + // Wait for the async DVT task to complete and verify API was called + await vi.waitFor(() => { + expect(api.validator.submitSyncCommitteeSelections).toHaveBeenCalledOnce(); + }); + + // Must submit partial sync committee selection proof based on duty + expect(api.validator.submitSyncCommitteeSelections).toHaveBeenCalledWith( + { + selections: [ + expect.objectContaining({ + validatorIndex: duty.validatorIndex, + slot, + subcommitteeIndex: 0, + }), + ], + }, + expect.any(Object) + ); + expect(duties[0].selectionProofs[0].selectionProof).toBe(aggregatorSelectionProof); + }); }); function toSyncDutySubnet(duty: routes.validator.SyncDuty): SyncDutySubnet { diff --git a/packages/validator/test/unit/services/syncCommittee.test.ts b/packages/validator/test/unit/services/syncCommittee.test.ts index ae07a819d6..d0b9f66cb1 100644 --- a/packages/validator/test/unit/services/syncCommittee.test.ts +++ b/packages/validator/test/unit/services/syncCommittee.test.ts @@ -1,7 +1,6 @@ import {afterEach, beforeEach, describe, expect, it, vi} from "vitest"; import {SecretKey} from "@chainsafe/blst"; import {toHexString} from "@chainsafe/ssz"; -import {routes} from "@lodestar/api"; import {createChainForkConfig} from "@lodestar/config"; import {config as mainnetConfig} from "@lodestar/config/default"; import {ssz} from "@lodestar/types"; @@ -64,10 +63,7 @@ describe("SyncCommitteeService", () => { vi.resetAllMocks(); }); - const testContexts: [string, SyncCommitteeServiceOpts][] = [ - ["With default configuration", {}], - ["With distributed aggregation selection enabled", {distributedAggregationSelection: true}], - ]; + const testContexts: [string, SyncCommitteeServiceOpts][] = [["With default configuration", {}]]; for (const [title, opts] of testContexts) { describe(title, () => { @@ -99,8 +95,7 @@ describe("SyncCommitteeService", () => { }, selectionProofs: [ { - selectionProof: opts.distributedAggregationSelection ? 
null : ZERO_HASH, - partialSelectionProof: opts.distributedAggregationSelection ? ZERO_HASH : undefined, + selectionProof: ZERO_HASH, subcommitteeIndex: 0, }, ], @@ -125,16 +120,6 @@ describe("SyncCommitteeService", () => { api.validator.produceSyncCommitteeContribution.mockResolvedValue(mockApiResponse({data: contribution})); api.validator.publishContributionAndProofs.mockResolvedValue(mockApiResponse({})); - if (opts.distributedAggregationSelection) { - // Mock distributed validator middleware client selections endpoint - // and return a selection proof that passes `is_sync_committee_aggregator` test - api.validator.submitSyncCommitteeSelections.mockResolvedValue( - mockApiResponse({ - data: [{validatorIndex: 0, slot: 0, subcommitteeIndex: 0, selectionProof: Buffer.alloc(1, 0x19)}], - }) - ); - } - // Mock signing service validatorStore.signSyncCommitteeSignature.mockResolvedValue(syncCommitteeSignature); validatorStore.signContributionAndProof.mockResolvedValue(contributionAndProof); @@ -142,18 +127,6 @@ describe("SyncCommitteeService", () => { // Trigger clock onSlot for slot 0 await clock.tickSlotFns(0, controller.signal); - if (opts.distributedAggregationSelection) { - // Must submit partial sync committee selection proof based on duty - const selection: routes.validator.SyncCommitteeSelection = { - validatorIndex: 0, - slot: 0, - subcommitteeIndex: 0, - selectionProof: ZERO_HASH, - }; - expect(api.validator.submitSyncCommitteeSelections).toHaveBeenCalledOnce(); - expect(api.validator.submitSyncCommitteeSelections).toHaveBeenCalledWith({selections: [selection]}); - } - // Must submit the signature received through signSyncCommitteeSignature() expect(api.beacon.submitPoolSyncCommitteeSignatures).toHaveBeenCalledOnce(); expect(api.beacon.submitPoolSyncCommitteeSignatures).toHaveBeenCalledWith({ From 39dac0f03d5c70ca9314945486f5095ed1563d35 Mon Sep 17 00:00:00 2001 From: Nico Flaig Date: Fri, 19 Dec 2025 19:03:16 +0100 Subject: [PATCH 14/20] fix: avoid calling committee selection apis if there are no duties (#8708) Since https://github.com/ChainSafe/lodestar/pull/8669 we might call the committee selection apis even if we don't have any duties which is unnecessary and charon doesn't like it. ``` lodestar-1 | Dec-19 16:16:47.001[] error: Error on sync committee aggregation selection slot=13278082 - JSON is not an array lodestar-1 | Error: JSON is not an array lodestar-1 | at value_fromJsonArray (file:///usr/app/node_modules/@chainsafe/ssz/src/type/arrayBasic.ts:162:11) lodestar-1 | at ListCompositeType.fromJson (file:///usr/app/node_modules/@chainsafe/ssz/src/type/array.ts:121:12) lodestar-1 | at ApiResponse.value (file:///usr/app/packages/api/src/utils/client/response.ts:115:51) lodestar-1 | at SyncCommitteeDutiesService.runDistributedAggregationSelectionTasks (file:///usr/app/packages/validator/src/services/syncCommitteeDuties.ts:385:36) lodestar-1 | at processTicksAndRejections (node:internal/process/task_queues:103:5) ``` --- packages/validator/src/services/attestationDuties.ts | 4 ++++ packages/validator/src/services/syncCommitteeDuties.ts | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/packages/validator/src/services/attestationDuties.ts b/packages/validator/src/services/attestationDuties.ts index eed42e8a5a..30520d1400 100644 --- a/packages/validator/src/services/attestationDuties.ts +++ b/packages/validator/src/services/attestationDuties.ts @@ -453,6 +453,10 @@ export class AttestationDutiesService { * 3. 
Mutate duty objects to set selection proofs for aggregators */ private async runDistributedAggregationSelectionTasks(duties: AttDutyAndProof[], epoch: Epoch): Promise { + if (duties.length === 0) { + return; + } + const partialSelections: routes.validator.BeaconCommitteeSelection[] = duties.map( ({duty, partialSelectionProof}) => ({ validatorIndex: duty.validatorIndex, diff --git a/packages/validator/src/services/syncCommitteeDuties.ts b/packages/validator/src/services/syncCommitteeDuties.ts index d8494c81a9..36151e52fe 100644 --- a/packages/validator/src/services/syncCommitteeDuties.ts +++ b/packages/validator/src/services/syncCommitteeDuties.ts @@ -355,6 +355,10 @@ export class SyncCommitteeDutiesService { * 3. Mutate duty objects to set selection proofs for aggregators */ private async runDistributedAggregationSelectionTasks(duties: SyncDutyAndProofs[], slot: number): Promise { + if (duties.length === 0) { + return; + } + const partialSelections: routes.validator.SyncCommitteeSelection[] = []; for (const {duty, selectionProofs} of duties) { From 7e9e7caf3835a2af5909f18adb39cae3b4c566e3 Mon Sep 17 00:00:00 2001 From: Nico Flaig Date: Fri, 19 Dec 2025 21:44:12 +0100 Subject: [PATCH 15/20] chore: log aggregation selection errors to debug (#8709) These errors aren't really critical and might be common right now because we moved from per slot to per epoch in https://github.com/ChainSafe/lodestar/pull/8669 and not all validator clients doing the same will cause calls to time out if signature threshold in DVT middleware is not reached. --- packages/validator/src/services/attestationDuties.ts | 6 +++--- packages/validator/src/services/syncCommitteeDuties.ts | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/validator/src/services/attestationDuties.ts b/packages/validator/src/services/attestationDuties.ts index 30520d1400..6fcb2cc423 100644 --- a/packages/validator/src/services/attestationDuties.ts +++ b/packages/validator/src/services/attestationDuties.ts @@ -211,7 +211,7 @@ export class AttestationDutiesService { // This is required to correctly determine if validator is aggregator and to produce // a AggregateAndProof that can be threshold aggregated by the middleware client. 
await this.runDistributedAggregationSelectionTasks(Array.from(epochDuties.values()), epoch).catch((e) => - this.logger.error("Error on attestation aggregation selection", {epoch}, e) + this.logger.debug("Error on attestation aggregation selection", {epoch}, e) ); } @@ -380,7 +380,7 @@ export class AttestationDutiesService { if (epochDuties) { if (this.opts?.distributedAggregationSelection) { await this.runDistributedAggregationSelectionTasks(Array.from(epochDuties.values()), dutyEpoch).catch((e) => - this.logger.error("Error on attestation aggregation selection after duties reorg", logContext, e) + this.logger.debug("Error on attestation aggregation selection after duties reorg", logContext, e) ); } @@ -479,7 +479,7 @@ export class AttestationDutiesService { const combinedSelection = combinedSelections.find((s) => s.validatorIndex === validatorIndex && s.slot === slot); if (!combinedSelection) { - this.logger.warn("Did not receive combined beacon committee selection proof", logCtxValidator); + this.logger.debug("Did not receive combined beacon committee selection proof", logCtxValidator); continue; } diff --git a/packages/validator/src/services/syncCommitteeDuties.ts b/packages/validator/src/services/syncCommitteeDuties.ts index 36151e52fe..b99ae1f47d 100644 --- a/packages/validator/src/services/syncCommitteeDuties.ts +++ b/packages/validator/src/services/syncCommitteeDuties.ts @@ -143,7 +143,7 @@ export class SyncCommitteeDutiesService { // sync committee contributions as it is required to correctly determine if validator is aggregator // and to produce a ContributionAndProof that can be threshold aggregated by the middleware client. this.runDistributedAggregationSelectionTasks(duties, slot).catch((e) => - this.logger.error("Error on sync committee aggregation selection", {slot}, e) + this.logger.debug("Error on sync committee aggregation selection", {slot}, e) ); } } @@ -400,7 +400,7 @@ export class SyncCommitteeDutiesService { ); if (!combinedSelection) { - this.logger.warn("Did not receive combined sync committee selection proof", logCtxValidator); + this.logger.debug("Did not receive combined sync committee selection proof", logCtxValidator); continue; } From b255111a2013d43d5f65889274294e2740493c28 Mon Sep 17 00:00:00 2001 From: twoeths <10568965+twoeths@users.noreply.github.com> Date: Mon, 22 Dec 2025 13:16:12 +0700 Subject: [PATCH 16/20] refactor: pass validators pubkey-index map from cli (#8707) **Motivation** - we will not be able to access `pubkey2index` or `index2pubkey` once we switch to a native state-transition so we need to be prepared for that **Description** - pass `pubkey2index`, `index2pubkey` from cli instead - in the future, we should find a way to extract them given a BeaconState so that we don't have to depend on any implementations of BeaconStateView, see https://github.com/ChainSafe/lodestar/issues/8706#issue-3741320691 Closes #8652 --------- Co-authored-by: Tuyen Nguyen --- packages/beacon-node/src/chain/chain.ts | 52 +++++++------------ packages/beacon-node/src/node/nodejs.ts | 11 +++- .../produceBlock/produceBlockBody.test.ts | 2 + .../perf/chain/verifyImportBlocks.test.ts | 2 + .../test/spec/presets/fork_choice.test.ts | 33 ++++++++++-- .../test/utils/networkWithMockDb.ts | 19 ++++++- .../beacon-node/test/utils/node/beacon.ts | 26 +++++++++- packages/cli/src/cmds/beacon/handler.ts | 19 ++++++- packages/state-transition/src/index.ts | 3 +- 9 files changed, 121 insertions(+), 46 deletions(-) diff --git a/packages/beacon-node/src/chain/chain.ts 
b/packages/beacon-node/src/chain/chain.ts index ac6037b1f6..c8b8dade5c 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -17,10 +17,8 @@ import { computeEndSlotAtEpoch, computeEpochAtSlot, computeStartSlotAtEpoch, - createCachedBeaconState, getEffectiveBalanceIncrementsZeroInactive, getEffectiveBalancesFromStateBytes, - isCachedBeaconState, processSlots, } from "@lodestar/state-transition"; import { @@ -204,6 +202,8 @@ export class BeaconChain implements IBeaconChain { { privateKey, config, + pubkey2index, + index2pubkey, db, dbName, dataDir, @@ -219,6 +219,8 @@ export class BeaconChain implements IBeaconChain { }: { privateKey: PrivateKey; config: BeaconConfig; + pubkey2index: PubkeyIndexMap; + index2pubkey: Index2PubkeyCache; db: IBeaconDb; dbName: string; dataDir: string; @@ -228,7 +230,7 @@ export class BeaconChain implements IBeaconChain { clock?: IClock; metrics: Metrics | null; validatorMonitor: ValidatorMonitor | null; - anchorState: BeaconStateAllForks; + anchorState: CachedBeaconStateAllForks; isAnchorStateFinalized: boolean; executionEngine: IExecutionEngine; executionBuilder?: IExecutionBuilder; @@ -287,39 +289,25 @@ export class BeaconChain implements IBeaconChain { logger, }); - // Restore state caches - // anchorState may already by a CachedBeaconState. If so, don't create the cache again, since deserializing all - // pubkeys takes ~30 seconds for 350k keys (mainnet 2022Q2). - // When the BeaconStateCache is created in initializeBeaconStateFromEth1 it may be incorrect. Until we can ensure that - // it's safe to re-use _ANY_ BeaconStateCache, this option is disabled by default and only used in tests. - const cachedState = - isCachedBeaconState(anchorState) && opts.skipCreateStateCacheIfAvailable - ? anchorState - : createCachedBeaconState(anchorState, { - config, - pubkey2index: new PubkeyIndexMap(), - index2pubkey: [], - }); - this._earliestAvailableSlot = cachedState.slot; - - this.shufflingCache = cachedState.epochCtx.shufflingCache = new ShufflingCache(metrics, logger, this.opts, [ + this._earliestAvailableSlot = anchorState.slot; + this.shufflingCache = anchorState.epochCtx.shufflingCache = new ShufflingCache(metrics, logger, this.opts, [ { - shuffling: cachedState.epochCtx.previousShuffling, - decisionRoot: cachedState.epochCtx.previousDecisionRoot, + shuffling: anchorState.epochCtx.previousShuffling, + decisionRoot: anchorState.epochCtx.previousDecisionRoot, }, { - shuffling: cachedState.epochCtx.currentShuffling, - decisionRoot: cachedState.epochCtx.currentDecisionRoot, + shuffling: anchorState.epochCtx.currentShuffling, + decisionRoot: anchorState.epochCtx.currentDecisionRoot, }, { - shuffling: cachedState.epochCtx.nextShuffling, - decisionRoot: cachedState.epochCtx.nextDecisionRoot, + shuffling: anchorState.epochCtx.nextShuffling, + decisionRoot: anchorState.epochCtx.nextDecisionRoot, }, ]); - // Persist single global instance of state caches - this.pubkey2index = cachedState.epochCtx.pubkey2index; - this.index2pubkey = cachedState.epochCtx.index2pubkey; + // Global cache of validators pubkey/index mapping + this.pubkey2index = pubkey2index; + this.index2pubkey = index2pubkey; const fileDataStore = opts.nHistoricalStatesFileDataStore ?? 
true; const blockStateCache = this.opts.nHistoricalStates @@ -350,15 +338,15 @@ export class BeaconChain implements IBeaconChain { } const {checkpoint} = computeAnchorCheckpoint(config, anchorState); - blockStateCache.add(cachedState); - blockStateCache.setHeadState(cachedState); - checkpointStateCache.add(checkpoint, cachedState); + blockStateCache.add(anchorState); + blockStateCache.setHeadState(anchorState); + checkpointStateCache.add(checkpoint, anchorState); const forkChoice = initializeForkChoice( config, emitter, clock.currentSlot, - cachedState, + anchorState, isAnchorStateFinalized, opts, this.justifiedBalancesGetter.bind(this), diff --git a/packages/beacon-node/src/node/nodejs.ts b/packages/beacon-node/src/node/nodejs.ts index 5957c81695..90c6f9f934 100644 --- a/packages/beacon-node/src/node/nodejs.ts +++ b/packages/beacon-node/src/node/nodejs.ts @@ -2,10 +2,11 @@ import {setMaxListeners} from "node:events"; import {PrivateKey} from "@libp2p/interface"; import {Registry} from "prom-client"; import {hasher} from "@chainsafe/persistent-merkle-tree"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {BeaconApiMethods} from "@lodestar/api/beacon/server"; import {BeaconConfig} from "@lodestar/config"; import type {LoggerNode} from "@lodestar/logger/node"; -import {BeaconStateAllForks} from "@lodestar/state-transition"; +import {CachedBeaconStateAllForks, Index2PubkeyCache} from "@lodestar/state-transition"; import {phase0} from "@lodestar/types"; import {sleep} from "@lodestar/utils"; import {ProcessShutdownCallback} from "@lodestar/validator"; @@ -45,13 +46,15 @@ export type BeaconNodeModules = { export type BeaconNodeInitModules = { opts: IBeaconNodeOptions; config: BeaconConfig; + pubkey2index: PubkeyIndexMap; + index2pubkey: Index2PubkeyCache; db: IBeaconDb; logger: LoggerNode; processShutdownCallback: ProcessShutdownCallback; privateKey: PrivateKey; dataDir: string; peerStoreDir?: string; - anchorState: BeaconStateAllForks; + anchorState: CachedBeaconStateAllForks; isAnchorStateFinalized: boolean; wsCheckpoint?: phase0.Checkpoint; metricsRegistries?: Registry[]; @@ -146,6 +149,8 @@ export class BeaconNode { static async init({ opts, config, + pubkey2index, + index2pubkey, db, logger, processShutdownCallback, @@ -220,6 +225,8 @@ export class BeaconNode { privateKey, config, clock, + pubkey2index, + index2pubkey, dataDir, db, dbName: opts.db.name, diff --git a/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts b/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts index 26c685480d..8262fd8a75 100644 --- a/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts +++ b/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts @@ -39,6 +39,8 @@ describe("produceBlockBody", () => { { privateKey: await generateKeyPair("secp256k1"), config: state.config, + pubkey2index: state.epochCtx.pubkey2index, + index2pubkey: state.epochCtx.index2pubkey, db, dataDir: ".", dbName: ".", diff --git a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts index 793b24564d..3a75d58d9f 100644 --- a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts +++ b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts @@ -91,6 +91,8 @@ describe.skip("verify+import blocks - range sync perf test", () => { { privateKey: await generateKeyPair("secp256k1"), config: state.config, + pubkey2index: state.epochCtx.pubkey2index, + 
index2pubkey: state.epochCtx.index2pubkey, db, dataDir: ".", dbName: ".", diff --git a/packages/beacon-node/test/spec/presets/fork_choice.test.ts b/packages/beacon-node/test/spec/presets/fork_choice.test.ts index dedb898934..e8faa4d1af 100644 --- a/packages/beacon-node/test/spec/presets/fork_choice.test.ts +++ b/packages/beacon-node/test/spec/presets/fork_choice.test.ts @@ -1,6 +1,7 @@ import path from "node:path"; import {generateKeyPair} from "@libp2p/crypto/keys"; import {expect} from "vitest"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {toHexString} from "@chainsafe/ssz"; import {createBeaconConfig} from "@lodestar/config"; import {CheckpointWithHex, ForkChoice} from "@lodestar/fork-choice"; @@ -14,7 +15,14 @@ import { ForkSeq, } from "@lodestar/params"; import {InputType} from "@lodestar/spec-test-util"; -import {BeaconStateAllForks, isExecutionStateType, signedBlockToSignedHeader} from "@lodestar/state-transition"; +import { + BeaconStateAllForks, + Index2PubkeyCache, + createCachedBeaconState, + isExecutionStateType, + signedBlockToSignedHeader, + syncPubkeys, +} from "@lodestar/state-transition"; import { Attestation, AttesterSlashing, @@ -45,7 +53,6 @@ import {computePreFuluKzgCommitmentsInclusionProof} from "../../../src/util/blob import {ClockEvent} from "../../../src/util/clock.js"; import {ClockStopped} from "../../mocks/clock.js"; import {getMockedBeaconDb} from "../../mocks/mockedBeaconDb.js"; -import {createCachedBeaconStateTest} from "../../utils/cachedBeaconState.js"; import {getConfig} from "../../utils/config.js"; import {testLogger} from "../../utils/logger.js"; import {assertCorrectProgressiveBalances} from "../config.js"; @@ -71,7 +78,7 @@ const forkChoiceTest = const {steps, anchorState} = testcase; const currentSlot = anchorState.slot; const config = getConfig(fork); - const state = createCachedBeaconStateTest(anchorState, config); + // const state = createCachedBeaconStateTest(anchorState, config); /** This is to track test's tickTime to be used in proposer boost */ let tickTime = 0; @@ -89,6 +96,20 @@ const forkChoiceTest = logger: testLogger("executionEngine"), }); + const beaconConfig = createBeaconConfig(config, anchorState.genesisValidatorsRoot); + const pubkey2index = new PubkeyIndexMap(); + const index2pubkey: Index2PubkeyCache = []; + syncPubkeys(anchorState.validators.getAllReadonlyValues(), pubkey2index, index2pubkey); + const cachedState = createCachedBeaconState( + anchorState, + { + config: beaconConfig, + pubkey2index, + index2pubkey, + }, + {skipSyncPubkeys: true} + ); + const chain = new BeaconChain( { ...defaultChainOptions, @@ -111,7 +132,9 @@ const forkChoiceTest = }, { privateKey: await generateKeyPair("secp256k1"), - config: createBeaconConfig(config, state.genesisValidatorsRoot), + config: beaconConfig, + pubkey2index, + index2pubkey, db: getMockedBeaconDb(), dataDir: ".", dbName: ",", @@ -120,7 +143,7 @@ const forkChoiceTest = clock, metrics: null, validatorMonitor: null, - anchorState, + anchorState: cachedState, isAnchorStateFinalized: true, executionEngine, executionBuilder: undefined, diff --git a/packages/beacon-node/test/utils/networkWithMockDb.ts b/packages/beacon-node/test/utils/networkWithMockDb.ts index 3cbf1563d6..edf388a1fd 100644 --- a/packages/beacon-node/test/utils/networkWithMockDb.ts +++ b/packages/beacon-node/test/utils/networkWithMockDb.ts @@ -1,5 +1,7 @@ import {generateKeyPair} from "@libp2p/crypto/keys"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {ChainForkConfig, 
createBeaconConfig} from "@lodestar/config"; +import {Index2PubkeyCache, createCachedBeaconState, syncPubkeys} from "@lodestar/state-transition"; import {ssz} from "@lodestar/types"; import {sleep} from "@lodestar/utils"; import {BeaconChain} from "../../src/chain/chain.js"; @@ -9,7 +11,6 @@ import {GossipHandlers, Network, NetworkInitModules, getReqRespHandlers} from ". import {NetworkOptions, defaultNetworkOptions} from "../../src/network/options.js"; import {GetReqRespHandlerFn} from "../../src/network/reqresp/types.js"; import {getMockedBeaconDb} from "../mocks/mockedBeaconDb.js"; -import {createCachedBeaconStateTest} from "./cachedBeaconState.js"; import {ClockStatic} from "./clock.js"; import {testLogger} from "./logger.js"; import {generateState} from "./state.js"; @@ -42,6 +43,18 @@ export async function getNetworkForTest( ); const beaconConfig = createBeaconConfig(config, state.genesisValidatorsRoot); + const pubkey2index = new PubkeyIndexMap(); + const index2pubkey: Index2PubkeyCache = []; + syncPubkeys(state.validators.getAllReadonlyValues(), pubkey2index, index2pubkey); + const cachedState = createCachedBeaconState( + state, + { + config: beaconConfig, + pubkey2index, + index2pubkey, + }, + {skipSyncPubkeys: true} + ); const db = getMockedBeaconDb(); const privateKey = await generateKeyPair("secp256k1"); @@ -60,6 +73,8 @@ export async function getNetworkForTest( { privateKey, config: beaconConfig, + pubkey2index, + index2pubkey, db, dataDir: ".", dbName: ".", @@ -73,7 +88,7 @@ export async function getNetworkForTest( ), metrics: null, validatorMonitor: null, - anchorState: createCachedBeaconStateTest(state, beaconConfig), + anchorState: cachedState, isAnchorStateFinalized: true, executionEngine: new ExecutionEngineDisabled(), } diff --git a/packages/beacon-node/test/utils/node/beacon.ts b/packages/beacon-node/test/utils/node/beacon.ts index 264b1df14c..1629d35045 100644 --- a/packages/beacon-node/test/utils/node/beacon.ts +++ b/packages/beacon-node/test/utils/node/beacon.ts @@ -4,12 +4,19 @@ import deepmerge from "deepmerge"; import tmp from "tmp"; import {setHasher} from "@chainsafe/persistent-merkle-tree"; import {hasher} from "@chainsafe/persistent-merkle-tree/hasher/hashtree"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {ChainConfig, createBeaconConfig, createChainForkConfig} from "@lodestar/config"; import {config as minimalConfig} from "@lodestar/config/default"; import {LevelDbController} from "@lodestar/db/controller/level"; import {LoggerNode} from "@lodestar/logger/node"; import {ForkSeq, GENESIS_SLOT, SLOTS_PER_EPOCH, ZERO_HASH_HEX} from "@lodestar/params"; -import {BeaconStateAllForks, computeTimeAtSlot} from "@lodestar/state-transition"; +import { + BeaconStateAllForks, + Index2PubkeyCache, + computeTimeAtSlot, + createCachedBeaconState, + syncPubkeys, +} from "@lodestar/state-transition"; import {phase0, ssz} from "@lodestar/types"; import {RecursivePartial, isPlainObject, toRootHex} from "@lodestar/utils"; import {BeaconDb} from "../../../src/db/index.js"; @@ -116,16 +123,31 @@ export async function getDevBeaconNode( ); const beaconConfig = createBeaconConfig(config, anchorState.genesisValidatorsRoot); + const pubkey2index = new PubkeyIndexMap(); + const index2pubkey: Index2PubkeyCache = []; + syncPubkeys(anchorState.validators.getAllReadonlyValues(), pubkey2index, index2pubkey); + const cachedState = createCachedBeaconState( + anchorState, + { + config: beaconConfig, + pubkey2index, + index2pubkey, + }, + {skipSyncPubkeys: true} + ); + 
return BeaconNode.init({ opts: options as IBeaconNodeOptions, config: beaconConfig, + pubkey2index, + index2pubkey, db, logger, processShutdownCallback: () => {}, privateKey, dataDir: ".", peerStoreDir, - anchorState, + anchorState: cachedState, wsCheckpoint: opts.wsCheckpoint, isAnchorStateFinalized: true, }); diff --git a/packages/cli/src/cmds/beacon/handler.ts b/packages/cli/src/cmds/beacon/handler.ts index a502b261ca..93f9141a18 100644 --- a/packages/cli/src/cmds/beacon/handler.ts +++ b/packages/cli/src/cmds/beacon/handler.ts @@ -2,11 +2,13 @@ import path from "node:path"; import {getHeapStatistics} from "node:v8"; import {SignableENR} from "@chainsafe/enr"; import {hasher} from "@chainsafe/persistent-merkle-tree"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {BeaconDb, BeaconNode} from "@lodestar/beacon-node"; import {ChainForkConfig, createBeaconConfig} from "@lodestar/config"; import {LevelDbController} from "@lodestar/db/controller/level"; import {LoggerNode, getNodeLogger} from "@lodestar/logger/node"; import {ACTIVE_PRESET, PresetName} from "@lodestar/params"; +import {Index2PubkeyCache, createCachedBeaconState, syncPubkeys} from "@lodestar/state-transition"; import {ErrorAborted, bytesToInt, formatBytes} from "@lodestar/utils"; import {ProcessShutdownCallback} from "@lodestar/validator"; import {BeaconNodeOptions, getBeaconConfigFromArgs} from "../../config/index.js"; @@ -78,16 +80,31 @@ export async function beaconHandler(args: BeaconArgs & GlobalArgs): Promise Date: Wed, 31 Dec 2025 22:08:51 +0700 Subject: [PATCH 17/20] fix: simplify getBlockSignatureSets api (#8720) **Motivation** - we use the whole CachedBeaconStateAllForks to get all block signatures, turn out we only need the validator indices of the current SyncCommittee **Description** given this `getConfig` api: ```typescript getDomain(domainSlot: Slot, domainType: DomainType, messageSlot?: Slot): Uint8Array ``` we currently pass `state.slot` as the 1st param. 
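For illustration, a minimal sketch of that current call shape inside one of the signature-set helpers (an assumption for clarity, not code from this diff; `DOMAIN_BEACON_PROPOSER` is just one example domain type and the helper name is hypothetical):

```typescript
import {BeaconConfig} from "@lodestar/config";
import {DOMAIN_BEACON_PROPOSER} from "@lodestar/params";
import {SignedBeaconBlock, Slot} from "@lodestar/types";

// sketch: the helper currently receives the whole cached state but only reads
// state.slot to resolve the fork-dependent domain, everything else comes from the block
function proposerDomainFromState(config: BeaconConfig, stateSlot: Slot, block: SignedBeaconBlock): Uint8Array {
  return config.getDomain(stateSlot, DOMAIN_BEACON_PROPOSER, block.message.slot);
}
```
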
However, it is the same as `block.slot` in `state-transition`, and it is in the same epoch when we verify blocks in batch in [beacon-node](https://github.com/ChainSafe/lodestar/blob/b255111a2013d43d5f65889274294e2740493c28/packages/beacon-node/src/chain/blocks/verifyBlock.ts#L62)

- so we can just use `block.slot` instead of passing the whole CachedBeaconStateAllForks to the `getBlockSignatureSets()` api
- we still have to pass in `currentSyncCommitteeIndexed` instead

part of #8650

---------

Co-authored-by: Tuyen Nguyen
---
 .../chain/blocks/verifyBlocksSignatures.ts    | 14 ++++++++---
 .../src/chain/validation/attesterSlashing.ts  |  7 +++++-
 .../beacon-node/src/chain/validation/block.ts |  2 +-
 .../src/chain/validation/proposerSlashing.ts  |  7 +++++-
 .../src/chain/validation/voluntaryExit.ts     |  2 +-
 .../beacon-node/src/sync/backfill/verify.ts   |  3 +--
 .../src/block/isValidIndexedAttestation.ts    |  6 +++--
 .../src/block/processAttestationsAltair.ts    |  2 +-
 .../src/block/processProposerSlashing.ts      |  2 +-
 .../src/block/processRandao.ts                |  4 ++--
 .../src/block/processSyncCommittee.ts         | 11 +++++----
 .../src/block/processVoluntaryExit.ts         |  2 +-
 .../src/signatureSets/attesterSlashings.ts    | 19 +++++++--------
 .../src/signatureSets/index.ts                | 22 +++++++++---------
 .../src/signatureSets/indexedAttestation.ts   | 23 ++++++++++---------
 .../src/signatureSets/proposer.ts             |  9 ++++----
 .../src/signatureSets/proposerSlashings.ts    | 13 ++++++-----
 .../src/signatureSets/randao.ts               |  9 ++++----
 .../src/signatureSets/voluntaryExits.ts       | 19 +++++++--------
 .../state-transition/src/stateTransition.ts   |  5 +---
 .../unit/signatureSets/signatureSets.test.ts  |  2 +-
 21 files changed, 103 insertions(+), 80 deletions(-)

diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts
index af63e01f84..19783c8957 100644
--- a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts
+++ b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts
@@ -28,6 +28,7 @@ export async function verifyBlocksSignatures(
 ): Promise<{verifySignaturesTime: number}> {
   const isValidPromises: Promise<boolean>[] = [];
   const recvToValLatency = Date.now() / 1000 - (opts.seenTimestampSec ?? Date.now() / 1000);
+  const currentSyncCommitteeIndexed = preState0.epochCtx.currentSyncCommitteeIndexed;

   // Verifies signatures after running state transition, so all SyncCommittee signed roots are known at this point.
   // We must ensure block.slot <= state.slot before running getAllBlockSignatureSets().
@@ -41,9 +42,16 @@ export async function verifyBlocksSignatures(
       : //
         // Verify signatures per block to track which block is invalid
         bls.verifySignatureSets(
-          getBlockSignatureSets(config, index2pubkey, preState0, block, indexedAttestationsByBlock[i], {
-            skipProposerSignature: opts.validProposerSignature,
-          })
+          getBlockSignatureSets(
+            config,
+            index2pubkey,
+            currentSyncCommitteeIndexed,
+            block,
+            indexedAttestationsByBlock[i],
+            {
+              skipProposerSignature: opts.validProposerSignature,
+            }
+          )
         );

   // getBlockSignatureSets() takes 45ms in benchmarks for 2022Q2 mainnet blocks (100 sigs).
When syncing a 32 blocks diff --git a/packages/beacon-node/src/chain/validation/attesterSlashing.ts b/packages/beacon-node/src/chain/validation/attesterSlashing.ts index abdca9a420..99b5ce474a 100644 --- a/packages/beacon-node/src/chain/validation/attesterSlashing.ts +++ b/packages/beacon-node/src/chain/validation/attesterSlashing.ts @@ -51,7 +51,12 @@ export async function validateAttesterSlashing( }); } - const signatureSets = getAttesterSlashingSignatureSets(chain.config, chain.index2pubkey, state, attesterSlashing); + const signatureSets = getAttesterSlashingSignatureSets( + chain.config, + chain.index2pubkey, + state.slot, + attesterSlashing + ); if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true, priority: prioritizeBls}))) { throw new AttesterSlashingError(GossipAction.REJECT, { code: AttesterSlashingErrorCode.INVALID, diff --git a/packages/beacon-node/src/chain/validation/block.ts b/packages/beacon-node/src/chain/validation/block.ts index 905096e79c..24b23aa43f 100644 --- a/packages/beacon-node/src/chain/validation/block.ts +++ b/packages/beacon-node/src/chain/validation/block.ts @@ -154,7 +154,7 @@ export async function validateGossipBlock( // [REJECT] The proposer signature, signed_beacon_block.signature, is valid with respect to the proposer_index pubkey. if (!chain.seenBlockInputCache.isVerifiedProposerSignature(blockSlot, blockRoot, signedBlock.signature)) { - const signatureSet = getBlockProposerSignatureSet(chain.config, chain.index2pubkey, blockState, signedBlock); + const signatureSet = getBlockProposerSignatureSet(chain.config, chain.index2pubkey, signedBlock); // Don't batch so verification is not delayed if (!(await chain.bls.verifySignatureSets([signatureSet], {verifyOnMainThread: true}))) { throw new BlockGossipError(GossipAction.REJECT, { diff --git a/packages/beacon-node/src/chain/validation/proposerSlashing.ts b/packages/beacon-node/src/chain/validation/proposerSlashing.ts index 4d36295b75..231c5d5bf1 100644 --- a/packages/beacon-node/src/chain/validation/proposerSlashing.ts +++ b/packages/beacon-node/src/chain/validation/proposerSlashing.ts @@ -44,7 +44,12 @@ async function validateProposerSlashing( }); } - const signatureSets = getProposerSlashingSignatureSets(chain.config, chain.index2pubkey, state, proposerSlashing); + const signatureSets = getProposerSlashingSignatureSets( + chain.config, + chain.index2pubkey, + state.slot, + proposerSlashing + ); if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true, priority: prioritizeBls}))) { throw new ProposerSlashingError(GossipAction.REJECT, { code: ProposerSlashingErrorCode.INVALID, diff --git a/packages/beacon-node/src/chain/validation/voluntaryExit.ts b/packages/beacon-node/src/chain/validation/voluntaryExit.ts index b72d9d6260..537738053a 100644 --- a/packages/beacon-node/src/chain/validation/voluntaryExit.ts +++ b/packages/beacon-node/src/chain/validation/voluntaryExit.ts @@ -59,7 +59,7 @@ async function validateVoluntaryExit( }); } - const signatureSet = getVoluntaryExitSignatureSet(chain.config, chain.index2pubkey, state, voluntaryExit); + const signatureSet = getVoluntaryExitSignatureSet(chain.config, chain.index2pubkey, state.slot, voluntaryExit); if (!(await chain.bls.verifySignatureSets([signatureSet], {batchable: true, priority: prioritizeBls}))) { throw new VoluntaryExitError(GossipAction.REJECT, { code: VoluntaryExitErrorCode.INVALID_SIGNATURE, diff --git a/packages/beacon-node/src/sync/backfill/verify.ts b/packages/beacon-node/src/sync/backfill/verify.ts index 
262f694125..b678bc54de 100644 --- a/packages/beacon-node/src/sync/backfill/verify.ts +++ b/packages/beacon-node/src/sync/backfill/verify.ts @@ -55,8 +55,7 @@ export async function verifyBlockProposerSignature( if (blocks.length === 1 && blocks[0].message.slot === GENESIS_SLOT) return; const signatures = blocks.reduce((sigs: ISignatureSet[], block) => { // genesis block doesn't have valid signature - if (block.message.slot !== GENESIS_SLOT) - sigs.push(getBlockProposerSignatureSet(config, index2pubkey, state, block)); + if (block.message.slot !== GENESIS_SLOT) sigs.push(getBlockProposerSignatureSet(config, index2pubkey, block)); return sigs; }, []); diff --git a/packages/state-transition/src/block/isValidIndexedAttestation.ts b/packages/state-transition/src/block/isValidIndexedAttestation.ts index 26b9db9192..4cb09b0e46 100644 --- a/packages/state-transition/src/block/isValidIndexedAttestation.ts +++ b/packages/state-transition/src/block/isValidIndexedAttestation.ts @@ -21,7 +21,7 @@ export function isValidIndexedAttestation( } if (verifySignature) { - return verifySignatureSet(getIndexedAttestationSignatureSet(config, index2pubkey, state, indexedAttestation)); + return verifySignatureSet(getIndexedAttestationSignatureSet(config, index2pubkey, state.slot, indexedAttestation)); } return true; } @@ -38,7 +38,9 @@ export function isValidIndexedAttestationBigint( } if (verifySignature) { - return verifySignatureSet(getIndexedAttestationBigintSignatureSet(config, index2pubkey, state, indexedAttestation)); + return verifySignatureSet( + getIndexedAttestationBigintSignatureSet(config, index2pubkey, state.slot, indexedAttestation) + ); } return true; } diff --git a/packages/state-transition/src/block/processAttestationsAltair.ts b/packages/state-transition/src/block/processAttestationsAltair.ts index 9218d22114..3cf366bb83 100644 --- a/packages/state-transition/src/block/processAttestationsAltair.ts +++ b/packages/state-transition/src/block/processAttestationsAltair.ts @@ -67,7 +67,7 @@ export function processAttestationsAltair( const sigSet = getAttestationWithIndicesSignatureSet( state.config, epochCtx.index2pubkey, - state, + state.slot, attestation, attestingIndices ); diff --git a/packages/state-transition/src/block/processProposerSlashing.ts b/packages/state-transition/src/block/processProposerSlashing.ts index b51fcb08b5..aec8047c75 100644 --- a/packages/state-transition/src/block/processProposerSlashing.ts +++ b/packages/state-transition/src/block/processProposerSlashing.ts @@ -80,7 +80,7 @@ export function assertValidProposerSlashing( const signatureSets = getProposerSlashingSignatureSets( state.config, state.epochCtx.index2pubkey, - state, + state.slot, proposerSlashing ); for (let i = 0; i < signatureSets.length; i++) { diff --git a/packages/state-transition/src/block/processRandao.ts b/packages/state-transition/src/block/processRandao.ts index 1f0ef25235..b43d12bdaa 100644 --- a/packages/state-transition/src/block/processRandao.ts +++ b/packages/state-transition/src/block/processRandao.ts @@ -12,12 +12,12 @@ import {getRandaoMix} from "../util/index.js"; * PERF: Fixed work independent of block contents. 
*/ export function processRandao(state: CachedBeaconStateAllForks, block: BeaconBlock, verifySignature = true): void { - const {epochCtx} = state; + const {epochCtx, config} = state; const epoch = epochCtx.epoch; const randaoReveal = block.body.randaoReveal; // verify RANDAO reveal - if (verifySignature && !verifyRandaoSignature(state.config, epochCtx.index2pubkey, state, block)) { + if (verifySignature && !verifyRandaoSignature(config, epochCtx.index2pubkey, block)) { throw new Error("RANDAO reveal is an invalid signature"); } diff --git a/packages/state-transition/src/block/processSyncCommittee.ts b/packages/state-transition/src/block/processSyncCommittee.ts index b2dbe165a0..ec7cae676b 100644 --- a/packages/state-transition/src/block/processSyncCommittee.ts +++ b/packages/state-transition/src/block/processSyncCommittee.ts @@ -3,6 +3,7 @@ import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_SYNC_COMMITTEE, SYNC_COMMITTEE_SIZE} from "@lodestar/params"; import {altair, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; +import {SyncCommitteeCache} from "../cache/syncCommitteeCache.js"; import {G2_POINT_AT_INFINITY} from "../constants/index.js"; import {CachedBeaconStateAllForks} from "../types.js"; import { @@ -28,7 +29,7 @@ export function processSyncAggregate( const signatureSet = getSyncCommitteeSignatureSet( state.config, state.epochCtx.index2pubkey, - state, + state.epochCtx.currentSyncCommitteeIndexed, block, participantIndices ); @@ -73,7 +74,7 @@ export function processSyncAggregate( export function getSyncCommitteeSignatureSet( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, + currentSyncCommitteeIndexed: SyncCommitteeCache, block: altair.BeaconBlock, /** Optional parameter to prevent computing it twice */ participantIndices?: number[] @@ -101,7 +102,7 @@ export function getSyncCommitteeSignatureSet( const rootSigned = block.parentRoot; if (!participantIndices) { - const committeeIndices = state.epochCtx.currentSyncCommitteeIndexed.validatorIndices; + const committeeIndices = currentSyncCommitteeIndexed.validatorIndices; participantIndices = syncAggregate.syncCommitteeBits.intersectValues(committeeIndices); } @@ -115,7 +116,9 @@ export function getSyncCommitteeSignatureSet( throw Error("Empty sync committee signature is not infinity"); } - const domain = config.getDomain(state.slot, DOMAIN_SYNC_COMMITTEE, previousSlot); + // the getDomain() api requires the state slot as 1st param, however it's the same to block.slot in state-transition + // and the same epoch when we verify blocks in batch in beacon-node. So we can safely use block.slot here. 
+ const domain = config.getDomain(block.slot, DOMAIN_SYNC_COMMITTEE, previousSlot); return { type: SignatureSetType.aggregate, diff --git a/packages/state-transition/src/block/processVoluntaryExit.ts b/packages/state-transition/src/block/processVoluntaryExit.ts index c3d1ce860b..2a5f103c29 100644 --- a/packages/state-transition/src/block/processVoluntaryExit.ts +++ b/packages/state-transition/src/block/processVoluntaryExit.ts @@ -76,7 +76,7 @@ export function getVoluntaryExitValidity( if ( verifySignature && - !verifyVoluntaryExitSignature(state.config, epochCtx.index2pubkey, state, signedVoluntaryExit) + !verifyVoluntaryExitSignature(state.config, epochCtx.index2pubkey, state.slot, signedVoluntaryExit) ) { return VoluntaryExitValidity.invalidSignature; } diff --git a/packages/state-transition/src/signatureSets/attesterSlashings.ts b/packages/state-transition/src/signatureSets/attesterSlashings.ts index 335b5717ad..57256fda5f 100644 --- a/packages/state-transition/src/signatureSets/attesterSlashings.ts +++ b/packages/state-transition/src/signatureSets/attesterSlashings.ts @@ -1,19 +1,20 @@ import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; -import {AttesterSlashing, IndexedAttestationBigint, SignedBeaconBlock, ssz} from "@lodestar/types"; +import {AttesterSlashing, IndexedAttestationBigint, SignedBeaconBlock, Slot, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; -import {CachedBeaconStateAllForks} from "../types.js"; import {ISignatureSet, SignatureSetType, computeSigningRoot, computeStartSlotAtEpoch} from "../util/index.js"; /** Get signature sets from all AttesterSlashing objects in a block */ export function getAttesterSlashingsSignatureSets( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): ISignatureSet[] { + // the getDomain() api requires the state slot as 1st param, however it's the same to block.slot in state-transition + // and the same epoch when we verify blocks in batch in beacon-node. So we can safely use block.slot here. 
+ const blockSlot = signedBlock.message.slot; return signedBlock.message.body.attesterSlashings.flatMap((attesterSlashing) => - getAttesterSlashingSignatureSets(config, index2pubkey, state, attesterSlashing) + getAttesterSlashingSignatureSets(config, index2pubkey, blockSlot, attesterSlashing) ); } @@ -21,22 +22,22 @@ export function getAttesterSlashingsSignatureSets( export function getAttesterSlashingSignatureSets( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, + stateSlot: Slot, attesterSlashing: AttesterSlashing ): ISignatureSet[] { return [attesterSlashing.attestation1, attesterSlashing.attestation2].map((attestation) => - getIndexedAttestationBigintSignatureSet(config, index2pubkey, state, attestation) + getIndexedAttestationBigintSignatureSet(config, index2pubkey, stateSlot, attestation) ); } export function getIndexedAttestationBigintSignatureSet( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, + stateSlot: Slot, indexedAttestation: IndexedAttestationBigint ): ISignatureSet { - const slot = computeStartSlotAtEpoch(Number(indexedAttestation.data.target.epoch as bigint)); - const domain = config.getDomain(state.slot, DOMAIN_BEACON_ATTESTER, slot); + const messageSlot = computeStartSlotAtEpoch(Number(indexedAttestation.data.target.epoch as bigint)); + const domain = config.getDomain(stateSlot, DOMAIN_BEACON_ATTESTER, messageSlot); return { type: SignatureSetType.aggregate, diff --git a/packages/state-transition/src/signatureSets/index.ts b/packages/state-transition/src/signatureSets/index.ts index 3d3ad0254d..7397074d07 100644 --- a/packages/state-transition/src/signatureSets/index.ts +++ b/packages/state-transition/src/signatureSets/index.ts @@ -3,7 +3,7 @@ import {ForkSeq} from "@lodestar/params"; import {IndexedAttestation, SignedBeaconBlock, altair, capella} from "@lodestar/types"; import {getSyncCommitteeSignatureSet} from "../block/processSyncCommittee.js"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; -import {CachedBeaconStateAllForks, CachedBeaconStateAltair} from "../types.js"; +import {SyncCommitteeCache} from "../cache/syncCommitteeCache.js"; import {ISignatureSet} from "../util/index.js"; import {getAttesterSlashingsSignatureSets} from "./attesterSlashings.js"; import {getBlsToExecutionChangeSignatureSets} from "./blsToExecutionChange.js"; @@ -29,7 +29,7 @@ export * from "./voluntaryExits.js"; export function getBlockSignatureSets( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, + currentSyncCommitteeIndexed: SyncCommitteeCache, signedBlock: SignedBeaconBlock, indexedAttestations: IndexedAttestation[], opts?: { @@ -38,18 +38,18 @@ export function getBlockSignatureSets( } ): ISignatureSet[] { // fork based validations - const fork = state.config.getForkSeq(signedBlock.message.slot); + const fork = config.getForkSeq(signedBlock.message.slot); const signatureSets = [ - getRandaoRevealSignatureSet(config, index2pubkey, state, signedBlock.message), - ...getProposerSlashingsSignatureSets(config, index2pubkey, state, signedBlock), - ...getAttesterSlashingsSignatureSets(config, index2pubkey, state, signedBlock), - ...getAttestationsSignatureSets(config, index2pubkey, state, signedBlock, indexedAttestations), - ...getVoluntaryExitsSignatureSets(config, index2pubkey, state, signedBlock), + getRandaoRevealSignatureSet(config, index2pubkey, signedBlock.message), + ...getProposerSlashingsSignatureSets(config, index2pubkey, signedBlock), + 
...getAttesterSlashingsSignatureSets(config, index2pubkey, signedBlock), + ...getAttestationsSignatureSets(config, index2pubkey, signedBlock, indexedAttestations), + ...getVoluntaryExitsSignatureSets(config, index2pubkey, signedBlock), ]; if (!opts?.skipProposerSignature) { - signatureSets.push(getBlockProposerSignatureSet(config, index2pubkey, state, signedBlock)); + signatureSets.push(getBlockProposerSignatureSet(config, index2pubkey, signedBlock)); } // Only after altair fork, validate tSyncCommitteeSignature @@ -57,7 +57,7 @@ export function getBlockSignatureSets( const syncCommitteeSignatureSet = getSyncCommitteeSignatureSet( config, index2pubkey, - state as CachedBeaconStateAltair, + currentSyncCommitteeIndexed, (signedBlock as altair.SignedBeaconBlock).message ); // There may be no participants in this syncCommitteeSignature, so it must not be validated @@ -69,7 +69,7 @@ export function getBlockSignatureSets( // only after capella fork if (fork >= ForkSeq.capella) { const blsToExecutionChangeSignatureSets = getBlsToExecutionChangeSignatureSets( - state.config, + config, signedBlock as capella.SignedBeaconBlock ); if (blsToExecutionChangeSignatureSets.length > 0) { diff --git a/packages/state-transition/src/signatureSets/indexedAttestation.ts b/packages/state-transition/src/signatureSets/indexedAttestation.ts index f3a9724736..bb8d00e6b7 100644 --- a/packages/state-transition/src/signatureSets/indexedAttestation.ts +++ b/packages/state-transition/src/signatureSets/indexedAttestation.ts @@ -1,8 +1,7 @@ import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; -import {IndexedAttestation, SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; +import {IndexedAttestation, SignedBeaconBlock, Slot, phase0, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; -import {CachedBeaconStateAllForks} from "../types.js"; import { ISignatureSet, computeSigningRoot, @@ -12,11 +11,11 @@ import { export function getAttestationDataSigningRoot( config: BeaconConfig, - state: CachedBeaconStateAllForks, + stateSlot: Slot, data: phase0.AttestationData ): Uint8Array { - const slot = computeStartSlotAtEpoch(data.target.epoch); - const domain = config.getDomain(state.slot, DOMAIN_BEACON_ATTESTER, slot); + const messageSlot = computeStartSlotAtEpoch(data.target.epoch); + const domain = config.getDomain(stateSlot, DOMAIN_BEACON_ATTESTER, messageSlot); return computeSigningRoot(ssz.phase0.AttestationData, data, domain); } @@ -24,13 +23,13 @@ export function getAttestationDataSigningRoot( export function getAttestationWithIndicesSignatureSet( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, + stateSlot: Slot, attestation: Pick, attestingIndices: number[] ): ISignatureSet { return createAggregateSignatureSetFromComponents( attestingIndices.map((i) => index2pubkey[i]), - getAttestationDataSigningRoot(config, state, attestation.data), + getAttestationDataSigningRoot(config, stateSlot, attestation.data), attestation.signature ); } @@ -38,13 +37,13 @@ export function getAttestationWithIndicesSignatureSet( export function getIndexedAttestationSignatureSet( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, + stateSlot: Slot, indexedAttestation: IndexedAttestation ): ISignatureSet { return getAttestationWithIndicesSignatureSet( config, index2pubkey, - state, + stateSlot, indexedAttestation, indexedAttestation.attestingIndices ); @@ -53,7 +52,6 @@ export 
function getIndexedAttestationSignatureSet( export function getAttestationsSignatureSets( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock, indexedAttestations: IndexedAttestation[] ): ISignatureSet[] { @@ -62,7 +60,10 @@ export function getAttestationsSignatureSets( `Indexed attestations length mismatch: got ${indexedAttestations.length}, expected ${signedBlock.message.body.attestations.length}` ); } + // the getDomain() api requires the state slot as 1st param, however it's the same to block.slot in state-transition + // and the same epoch when we verify blocks in batch in beacon-node. So we can safely use block.slot here. + const blockSlot = signedBlock.message.slot; return indexedAttestations.map((indexedAttestation) => - getIndexedAttestationSignatureSet(config, index2pubkey, state, indexedAttestation) + getIndexedAttestationSignatureSet(config, index2pubkey, blockSlot, indexedAttestation) ); } diff --git a/packages/state-transition/src/signatureSets/proposer.ts b/packages/state-transition/src/signatureSets/proposer.ts index 3ffd346d31..cf692fcac9 100644 --- a/packages/state-transition/src/signatureSets/proposer.ts +++ b/packages/state-transition/src/signatureSets/proposer.ts @@ -9,20 +9,21 @@ import {ISignatureSet, SignatureSetType, verifySignatureSet} from "../util/signa export function verifyProposerSignature( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock | SignedBlindedBeaconBlock ): boolean { - const signatureSet = getBlockProposerSignatureSet(config, index2pubkey, state, signedBlock); + const signatureSet = getBlockProposerSignatureSet(config, index2pubkey, signedBlock); return verifySignatureSet(signatureSet); } export function getBlockProposerSignatureSet( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock | SignedBlindedBeaconBlock ): ISignatureSet { - const domain = config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, signedBlock.message.slot); + // the getDomain() api requires the state slot as 1st param, however it's the same to block.slot in state-transition + // and the same epoch when we verify blocks in batch in beacon-node. So we can safely use block.slot here. + const blockSlot = signedBlock.message.slot; + const domain = config.getDomain(blockSlot, DOMAIN_BEACON_PROPOSER, blockSlot); const blockType = isBlindedBeaconBlock(signedBlock.message) ? 
config.getPostBellatrixForkTypes(signedBlock.message.slot).BlindedBeaconBlock diff --git a/packages/state-transition/src/signatureSets/proposerSlashings.ts b/packages/state-transition/src/signatureSets/proposerSlashings.ts index 218eef8d2d..058d87cf49 100644 --- a/packages/state-transition/src/signatureSets/proposerSlashings.ts +++ b/packages/state-transition/src/signatureSets/proposerSlashings.ts @@ -1,8 +1,7 @@ import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_BEACON_PROPOSER} from "@lodestar/params"; -import {SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; +import {SignedBeaconBlock, Slot, phase0, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; -import {CachedBeaconStateAllForks} from "../types.js"; import {ISignatureSet, SignatureSetType, computeSigningRoot} from "../util/index.js"; /** @@ -11,7 +10,7 @@ import {ISignatureSet, SignatureSetType, computeSigningRoot} from "../util/index export function getProposerSlashingSignatureSets( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, + stateSlot: Slot, proposerSlashing: phase0.ProposerSlashing ): ISignatureSet[] { const pubkey = index2pubkey[proposerSlashing.signedHeader1.message.proposerIndex]; @@ -19,7 +18,7 @@ export function getProposerSlashingSignatureSets( // In state transition, ProposerSlashing headers are only partially validated. Their slot could be higher than the // clock and the slashing would still be valid. Must use bigint variants to hash correctly to all possible values return [proposerSlashing.signedHeader1, proposerSlashing.signedHeader2].map((signedHeader): ISignatureSet => { - const domain = config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, Number(signedHeader.message.slot as bigint)); + const domain = config.getDomain(stateSlot, DOMAIN_BEACON_PROPOSER, Number(signedHeader.message.slot as bigint)); return { type: SignatureSetType.single, @@ -33,10 +32,12 @@ export function getProposerSlashingSignatureSets( export function getProposerSlashingsSignatureSets( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): ISignatureSet[] { + // the getDomain() api requires the state slot as 1st param, however it's the same to block.slot in state-transition + // and the same epoch when we verify blocks in batch in beacon-node. So we can safely use block.slot here. 
+ const blockSlot = signedBlock.message.slot; return signedBlock.message.body.proposerSlashings.flatMap((proposerSlashing) => - getProposerSlashingSignatureSets(config, index2pubkey, state, proposerSlashing) + getProposerSlashingSignatureSets(config, index2pubkey, blockSlot, proposerSlashing) ); } diff --git a/packages/state-transition/src/signatureSets/randao.ts b/packages/state-transition/src/signatureSets/randao.ts index 3b6dc46376..6cb99cee30 100644 --- a/packages/state-transition/src/signatureSets/randao.ts +++ b/packages/state-transition/src/signatureSets/randao.ts @@ -2,7 +2,6 @@ import {BeaconConfig} from "@lodestar/config"; import {DOMAIN_RANDAO} from "@lodestar/params"; import {BeaconBlock, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; -import {CachedBeaconStateAllForks} from "../types.js"; import { ISignatureSet, SignatureSetType, @@ -14,10 +13,9 @@ import { export function verifyRandaoSignature( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, block: BeaconBlock ): boolean { - return verifySignatureSet(getRandaoRevealSignatureSet(config, index2pubkey, state, block)); + return verifySignatureSet(getRandaoRevealSignatureSet(config, index2pubkey, block)); } /** @@ -26,12 +24,13 @@ export function verifyRandaoSignature( export function getRandaoRevealSignatureSet( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, block: BeaconBlock ): ISignatureSet { // should not get epoch from epochCtx const epoch = computeEpochAtSlot(block.slot); - const domain = config.getDomain(state.slot, DOMAIN_RANDAO, block.slot); + // the getDomain() api requires the state slot as 1st param, however it's the same to block.slot in state-transition + // and the same epoch when we verify blocks in batch in beacon-node. So we can safely use block.slot here. 
+ const domain = config.getDomain(block.slot, DOMAIN_RANDAO, block.slot); return { type: SignatureSetType.single, diff --git a/packages/state-transition/src/signatureSets/voluntaryExits.ts b/packages/state-transition/src/signatureSets/voluntaryExits.ts index b7d6e82b01..7c2086aa58 100644 --- a/packages/state-transition/src/signatureSets/voluntaryExits.ts +++ b/packages/state-transition/src/signatureSets/voluntaryExits.ts @@ -1,7 +1,6 @@ import {BeaconConfig} from "@lodestar/config"; -import {SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; +import {SignedBeaconBlock, Slot, phase0, ssz} from "@lodestar/types"; import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; -import {CachedBeaconStateAllForks} from "../types.js"; import { ISignatureSet, SignatureSetType, @@ -13,10 +12,10 @@ import { export function verifyVoluntaryExitSignature( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, + stateSlot: Slot, signedVoluntaryExit: phase0.SignedVoluntaryExit ): boolean { - return verifySignatureSet(getVoluntaryExitSignatureSet(config, index2pubkey, state, signedVoluntaryExit)); + return verifySignatureSet(getVoluntaryExitSignatureSet(config, index2pubkey, stateSlot, signedVoluntaryExit)); } /** @@ -25,11 +24,11 @@ export function verifyVoluntaryExitSignature( export function getVoluntaryExitSignatureSet( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, + stateSlot: Slot, signedVoluntaryExit: phase0.SignedVoluntaryExit ): ISignatureSet { - const slot = computeStartSlotAtEpoch(signedVoluntaryExit.message.epoch); - const domain = config.getDomainForVoluntaryExit(state.slot, slot); + const messageSlot = computeStartSlotAtEpoch(signedVoluntaryExit.message.epoch); + const domain = config.getDomainForVoluntaryExit(stateSlot, messageSlot); return { type: SignatureSetType.single, @@ -42,10 +41,12 @@ export function getVoluntaryExitSignatureSet( export function getVoluntaryExitsSignatureSets( config: BeaconConfig, index2pubkey: Index2PubkeyCache, - state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): ISignatureSet[] { + // the getDomain() api requires the state slot as 1st param, however it's the same to block.slot in state-transition + // and the same epoch when we verify blocks in batch in beacon-node. So we can safely use block.slot here. 
+  const blockSlot = signedBlock.message.slot;
   return signedBlock.message.body.voluntaryExits.map((voluntaryExit) =>
-    getVoluntaryExitSignatureSet(config, index2pubkey, state, voluntaryExit)
+    getVoluntaryExitSignatureSet(config, index2pubkey, blockSlot, voluntaryExit)
   );
 }
diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts
index 3f2bc6147d..9e1f006cf8 100644
--- a/packages/state-transition/src/stateTransition.ts
+++ b/packages/state-transition/src/stateTransition.ts
@@ -111,10 +111,7 @@ export function stateTransition(
   postState = processSlotsWithTransientCache(postState, blockSlot, options, {metrics, validatorMonitor});

   // Verify proposer signature only
-  if (
-    verifyProposer &&
-    !verifyProposerSignature(postState.config, postState.epochCtx.index2pubkey, postState, signedBlock)
-  ) {
+  if (verifyProposer && !verifyProposerSignature(postState.config, postState.epochCtx.index2pubkey, signedBlock)) {
     throw new Error("Invalid block signature");
   }

diff --git a/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts b/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts
index 6fb825bb41..eead18b1e8 100644
--- a/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts
+++ b/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts
@@ -73,7 +73,7 @@ describe("signatureSets", () => {
     const signatureSets = getBlockSignatureSets(
       state.config,
       state.epochCtx.index2pubkey,
-      state,
+      state.epochCtx.currentSyncCommitteeIndexed,
       signedBlock,
       indexedAttestations
     );

From ae3f082e014762f6d44e4e271f8f69755041bc1b Mon Sep 17 00:00:00 2001
From: Nico Flaig
Date: Wed, 31 Dec 2025 16:09:54 +0100
Subject: [PATCH 18/20] fix: prevent duplicate aggregates passing validation due to race condition (#8716)

**Motivation**

https://github.com/ChainSafe/lodestar/pull/8711#pullrequestreview-3612431091

**Description**

Prevent duplicate aggregates from passing gossip validation due to a race condition by checking again whether we have seen the aggregate before inserting it into the op pool. This is required since we run multiple async operations between the first check and the insertion into the op pool.

`AlreadyKnown` disappears since we now filter those out properly during gossip validation, which is important because we don't want to re-gossip those aggregates.
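A minimal sketch of the pattern (the `SeenCache` interface and `validateOnce` helper below are hypothetical and only for illustration; the actual code re-checks `chain.seenAggregatedAttestations.isKnown()` before calling `add()`):

```typescript
interface SeenCache {
  isKnown(key: string): boolean;
  add(key: string): void;
}

async function validateOnce(seen: SeenCache, key: string, verify: () => Promise<boolean>): Promise<boolean> {
  // First check: cheap rejection of aggregates we have already fully validated.
  if (seen.isKnown(key)) return false;
  // Async work (e.g. BLS signature verification) yields to the event loop,
  // so a second copy of the same aggregate can pass its own first check here.
  if (!(await verify())) return false;
  // Re-check right before marking as seen, closing the race window.
  if (seen.isKnown(key)) return false;
  seen.add(key);
  return true;
}
```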
--- .../src/chain/validation/aggregateAndProof.ts | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/beacon-node/src/chain/validation/aggregateAndProof.ts b/packages/beacon-node/src/chain/validation/aggregateAndProof.ts index c01735b483..c8cbebac6e 100644 --- a/packages/beacon-node/src/chain/validation/aggregateAndProof.ts +++ b/packages/beacon-node/src/chain/validation/aggregateAndProof.ts @@ -245,6 +245,18 @@ async function validateAggregateAndProof( }); } + // Same race-condition check as above for seen aggregators + if ( + !skipValidationKnownAttesters && + chain.seenAggregatedAttestations.isKnown(targetEpoch, attIndex, attDataRootHex, aggregationBits) + ) { + throw new AttestationError(GossipAction.IGNORE, { + code: AttestationErrorCode.ATTESTERS_ALREADY_KNOWN, + targetEpoch, + aggregateRoot: attDataRootHex, + }); + } + chain.seenAggregators.add(targetEpoch, aggregatorIndex); chain.seenAggregatedAttestations.add( targetEpoch, From 075956b855c0451b8aa1b0118375e390a7eeb042 Mon Sep 17 00:00:00 2001 From: Nico Flaig Date: Wed, 31 Dec 2025 16:11:31 +0100 Subject: [PATCH 19/20] refactor: use map to lookup combined beacon committee selection for duty (#8710) https://github.com/ChainSafe/lodestar/pull/8669#discussion_r2636951039 --- .../src/services/attestationDuties.ts | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/packages/validator/src/services/attestationDuties.ts b/packages/validator/src/services/attestationDuties.ts index 6fcb2cc423..b32121cca9 100644 --- a/packages/validator/src/services/attestationDuties.ts +++ b/packages/validator/src/services/attestationDuties.ts @@ -469,20 +469,33 @@ export class AttestationDutiesService { const res = await this.api.validator.submitBeaconCommitteeSelections({selections: partialSelections}); - const combinedSelections = res.value(); - this.logger.debug("Received combined beacon committee selection proofs", {epoch, count: combinedSelections.length}); + const combinedSelections = new Map(); + for (const selection of res.value()) { + combinedSelections.set(selection.validatorIndex, selection); + } + this.logger.debug("Received combined beacon committee selection proofs", {epoch, count: combinedSelections.size}); for (const dutyAndProof of duties) { const {slot, validatorIndex, committeeIndex, committeeLength} = dutyAndProof.duty; const logCtxValidator = {slot, index: committeeIndex, validatorIndex}; - const combinedSelection = combinedSelections.find((s) => s.validatorIndex === validatorIndex && s.slot === slot); + const combinedSelection = combinedSelections.get(validatorIndex); if (!combinedSelection) { this.logger.debug("Did not receive combined beacon committee selection proof", logCtxValidator); continue; } + if (combinedSelection.slot !== slot) { + this.logger.debug("Received combined beacon committee selection proof for different slot", { + expected: slot, + actual: combinedSelection.slot, + index: committeeIndex, + validatorIndex, + }); + continue; + } + const isAggregator = isAggregatorFromCommitteeLength(committeeLength, combinedSelection.selectionProof); if (isAggregator) { From 86298a43e647760d3ca92b37e3db6ddf586b3f7c Mon Sep 17 00:00:00 2001 From: twoeths <10568965+twoeths@users.noreply.github.com> Date: Tue, 6 Jan 2026 08:48:58 +0700 Subject: [PATCH 20/20] refactor: move reward apis to state-transition (#8719) **Motivation** - the reward apis tightly couple to state-transition functions like `beforeProcessEpoch() processBlock() processAttestationAltair()` so it needs to be 
moved there **Description** - move api type definitions to `types` package so that it can be used everywhere - move reward apis implementation to `state-transition` package Closes #8690 --------- Co-authored-by: Tuyen Nguyen --- .../api/src/beacon/routes/beacon/index.ts | 7 -- packages/api/src/beacon/routes/beacon/pool.ts | 2 +- .../api/src/beacon/routes/beacon/rewards.ts | 116 ++---------------- .../api/src/beacon/routes/beacon/state.ts | 3 +- packages/api/src/beacon/routes/config.ts | 3 +- packages/api/src/beacon/routes/debug.ts | 3 +- packages/api/src/beacon/routes/lodestar.ts | 3 +- packages/api/src/beacon/routes/node.ts | 3 +- packages/api/src/beacon/routes/proof.ts | 3 +- packages/api/src/beacon/routes/validator.ts | 2 +- packages/api/src/builder/routes.ts | 2 +- packages/api/src/utils/codecs.ts | 12 +- packages/beacon-node/src/chain/chain.ts | 13 +- packages/beacon-node/src/chain/interface.ts | 10 +- packages/state-transition/src/index.ts | 1 + .../src}/rewards/attestationsRewards.ts | 26 ++-- .../src}/rewards/blockRewards.ts | 16 +-- .../state-transition/src/rewards/index.ts | 3 + .../src}/rewards/syncCommitteeRewards.ts | 9 +- .../test/unit}/rewards/blockRewards.test.ts | 18 +-- packages/types/src/index.ts | 2 + packages/types/src/utils/array.ts | 11 ++ packages/types/src/utils/rewards.ts | 102 +++++++++++++++ 23 files changed, 174 insertions(+), 196 deletions(-) rename packages/{beacon-node/src/chain => state-transition/src}/rewards/attestationsRewards.ts (91%) rename packages/{beacon-node/src/chain => state-transition/src}/rewards/blockRewards.ts (93%) create mode 100644 packages/state-transition/src/rewards/index.ts rename packages/{beacon-node/src/chain => state-transition/src}/rewards/syncCommitteeRewards.ts (86%) rename packages/{beacon-node/test/unit/chain => state-transition/test/unit}/rewards/blockRewards.test.ts (92%) create mode 100644 packages/types/src/utils/array.ts create mode 100644 packages/types/src/utils/rewards.ts diff --git a/packages/api/src/beacon/routes/beacon/index.ts b/packages/api/src/beacon/routes/beacon/index.ts index 4354f086bd..2e0326b3e1 100644 --- a/packages/api/src/beacon/routes/beacon/index.ts +++ b/packages/api/src/beacon/routes/beacon/index.ts @@ -13,13 +13,6 @@ export {block, pool, state, rewards}; export type {BlockHeaderResponse, BlockId} from "./block.js"; export {BroadcastValidation} from "./block.js"; -export type { - AttestationsRewards, - BlockRewards, - IdealAttestationsReward, - SyncCommitteeRewards, - TotalAttestationsReward, -} from "./rewards.js"; // TODO: Review if re-exporting all these types is necessary export type { EpochCommitteeResponse, diff --git a/packages/api/src/beacon/routes/beacon/pool.ts b/packages/api/src/beacon/routes/beacon/pool.ts index ee43280d73..aa15390c2d 100644 --- a/packages/api/src/beacon/routes/beacon/pool.ts +++ b/packages/api/src/beacon/routes/beacon/pool.ts @@ -2,6 +2,7 @@ import {ValueOf} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; import {ForkPostElectra, ForkPreElectra, isForkPostElectra} from "@lodestar/params"; import { + ArrayOf, AttesterSlashing, CommitteeIndex, SingleAttestation, @@ -12,7 +13,6 @@ import { ssz, } from "@lodestar/types"; import { - ArrayOf, EmptyArgs, EmptyMeta, EmptyMetaCodec, diff --git a/packages/api/src/beacon/routes/beacon/rewards.ts b/packages/api/src/beacon/routes/beacon/rewards.ts index 4335b5886c..724f385473 100644 --- a/packages/api/src/beacon/routes/beacon/rewards.ts +++ b/packages/api/src/beacon/routes/beacon/rewards.ts @@ -1,112 +1,12 @@ 
-import {ContainerType, ValueOf} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; -import {Epoch, ssz} from "@lodestar/types"; -import {ArrayOf, JsonOnlyReq} from "../../../utils/codecs.js"; +import {Epoch, rewards} from "@lodestar/types"; +import {JsonOnlyReq} from "../../../utils/codecs.js"; import {Endpoint, RouteDefinitions, Schema} from "../../../utils/index.js"; import {ExecutionOptimisticAndFinalizedCodec, ExecutionOptimisticAndFinalizedMeta} from "../../../utils/metadata.js"; import {fromValidatorIdsStr, toValidatorIdsStr} from "../../../utils/serdes.js"; import {BlockArgs} from "./block.js"; import {ValidatorId} from "./state.js"; -const BlockRewardsType = new ContainerType( - { - /** Proposer of the block, the proposer index who receives these rewards */ - proposerIndex: ssz.ValidatorIndex, - /** Total block reward, equal to attestations + sync_aggregate + proposer_slashings + attester_slashings */ - total: ssz.UintNum64, - /** Block reward component due to included attestations */ - attestations: ssz.UintNum64, - /** Block reward component due to included sync_aggregate */ - syncAggregate: ssz.UintNum64, - /** Block reward component due to included proposer_slashings */ - proposerSlashings: ssz.UintNum64, - /** Block reward component due to included attester_slashings */ - attesterSlashings: ssz.UintNum64, - }, - {jsonCase: "eth2"} -); - -const AttestationsRewardType = new ContainerType( - { - /** Reward for head vote. Could be negative to indicate penalty */ - head: ssz.UintNum64, - /** Reward for target vote. Could be negative to indicate penalty */ - target: ssz.UintNum64, - /** Reward for source vote. Could be negative to indicate penalty */ - source: ssz.UintNum64, - /** Inclusion delay reward (phase0 only) */ - inclusionDelay: ssz.UintNum64, - /** Inactivity penalty. Should be a negative number to indicate penalty */ - inactivity: ssz.UintNum64, - }, - {jsonCase: "eth2"} -); - -const IdealAttestationsRewardsType = new ContainerType( - { - ...AttestationsRewardType.fields, - effectiveBalance: ssz.UintNum64, - }, - {jsonCase: "eth2"} -); - -const TotalAttestationsRewardsType = new ContainerType( - { - ...AttestationsRewardType.fields, - validatorIndex: ssz.ValidatorIndex, - }, - {jsonCase: "eth2"} -); - -const AttestationsRewardsType = new ContainerType( - { - idealRewards: ArrayOf(IdealAttestationsRewardsType), - totalRewards: ArrayOf(TotalAttestationsRewardsType), - }, - {jsonCase: "eth2"} -); - -const SyncCommitteeRewardsType = ArrayOf( - new ContainerType( - { - validatorIndex: ssz.ValidatorIndex, - reward: ssz.UintNum64, - }, - {jsonCase: "eth2"} - ) -); - -/** - * Rewards info for a single block. Every reward value is in Gwei. - */ -export type BlockRewards = ValueOf; - -/** - * Rewards for a single set of (ideal or actual depending on usage) attestations. Reward value is in Gwei - */ -export type AttestationsReward = ValueOf; - -/** - * Rewards info for ideal attestations ie. Maximum rewards could be earned by making timely head, target and source vote. - * `effectiveBalance` is in Gwei - */ -export type IdealAttestationsReward = ValueOf; - -/** - * Rewards info for actual attestations - */ -export type TotalAttestationsReward = ValueOf; - -export type AttestationsRewards = ValueOf; - -/** - * Rewards info for sync committee participation. Every reward value is in Gwei. - * Note: In the case that block proposer is present in `SyncCommitteeRewards`, the reward value only reflects rewards for - * participating in sync committee. 
Please refer to `BlockRewards.syncAggregate` for rewards of proposer including sync committee - * outputs into their block - */ -export type SyncCommitteeRewards = ValueOf; - export type Endpoints = { /** * Get block rewards @@ -116,7 +16,7 @@ export type Endpoints = { "GET", BlockArgs, {params: {block_id: string}}, - BlockRewards, + rewards.BlockRewards, ExecutionOptimisticAndFinalizedMeta >; @@ -133,7 +33,7 @@ export type Endpoints = { validatorIds?: ValidatorId[]; }, {params: {epoch: number}; body: string[]}, - AttestationsRewards, + rewards.AttestationsRewards, ExecutionOptimisticAndFinalizedMeta >; @@ -148,7 +48,7 @@ export type Endpoints = { validatorIds?: ValidatorId[]; }, {params: {block_id: string}; body: string[]}, - SyncCommitteeRewards, + rewards.SyncCommitteeRewards, ExecutionOptimisticAndFinalizedMeta >; }; @@ -164,7 +64,7 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions = { isEmpty: true, }; -export function ArrayOf(elementType: Type, limit = Infinity): ArrayType, unknown, unknown> { - if (isCompositeType(elementType)) { - return new ListCompositeType(elementType, limit) as unknown as ArrayType, unknown, unknown>; - } - if (isBasicType(elementType)) { - return new ListBasicType(elementType, limit) as unknown as ArrayType, unknown, unknown>; - } - throw Error(`Unknown type ${elementType.typeName}`); -} - export function WithMeta(getType: (m: M) => Type): ResponseDataCodec { return { toJson: (data, meta: M) => getType(meta).toJson(data), diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index c8b8dade5c..4e27c3354c 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -14,9 +14,12 @@ import { EpochShuffling, Index2PubkeyCache, computeAnchorCheckpoint, + computeAttestationsRewards, + computeBlockRewards, computeEndSlotAtEpoch, computeEpochAtSlot, computeStartSlotAtEpoch, + computeSyncCommitteeRewards, getEffectiveBalanceIncrementsZeroInactive, getEffectiveBalancesFromStateBytes, processSlots, @@ -36,6 +39,7 @@ import { Wei, isBlindedBeaconBlock, phase0, + rewards, } from "@lodestar/types"; import {Logger, fromHex, gweiToWei, isErrorAborted, pruneSetToMax, sleep, toRootHex} from "@lodestar/utils"; import {ProcessShutdownCallback} from "@lodestar/validator"; @@ -77,9 +81,6 @@ import {AssembledBlockType, BlockType, ProduceResult} from "./produceBlock/index import {BlockAttributes, produceBlockBody, produceCommonBlockBody} from "./produceBlock/produceBlockBody.js"; import {QueuedStateRegenerator, RegenCaller} from "./regen/index.js"; import {ReprocessController} from "./reprocess.js"; -import {AttestationsRewards, computeAttestationsRewards} from "./rewards/attestationsRewards.js"; -import {BlockRewards, computeBlockRewards} from "./rewards/blockRewards.js"; -import {SyncCommitteeRewards, computeSyncCommitteeRewards} from "./rewards/syncCommitteeRewards.js"; import { SeenAggregators, SeenAttesters, @@ -1285,7 +1286,7 @@ export class BeaconChain implements IBeaconChain { } } - async getBlockRewards(block: BeaconBlock | BlindedBeaconBlock): Promise { + async getBlockRewards(block: BeaconBlock | BlindedBeaconBlock): Promise { let preState = this.regen.getPreStateSync(block); if (preState === null) { @@ -1302,7 +1303,7 @@ export class BeaconChain implements IBeaconChain { async getAttestationsRewards( epoch: Epoch, validatorIds?: (ValidatorIndex | string)[] - ): Promise<{rewards: AttestationsRewards; executionOptimistic: boolean; finalized: boolean}> { + ): 
Promise<{rewards: rewards.AttestationsRewards; executionOptimistic: boolean; finalized: boolean}> { // We use end slot of (epoch + 1) to ensure we have seen all attestations. On-time or late. Any late attestation beyond this slot is not considered const slot = computeEndSlotAtEpoch(epoch + 1); const stateResult = await this.getStateBySlot(slot, {allowRegen: false}); // No regen if state not in cache @@ -1328,7 +1329,7 @@ export class BeaconChain implements IBeaconChain { async getSyncCommitteeRewards( block: BeaconBlock | BlindedBeaconBlock, validatorIds?: (ValidatorIndex | string)[] - ): Promise { + ): Promise { let preState = this.regen.getPreStateSync(block); if (preState === null) { diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index 5b51be3b9a..93ce377bc8 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ b/packages/beacon-node/src/chain/interface.ts @@ -23,6 +23,7 @@ import { altair, capella, phase0, + rewards, } from "@lodestar/types"; import {Logger} from "@lodestar/utils"; import {IExecutionBuilder, IExecutionEngine} from "../execution/index.js"; @@ -48,9 +49,6 @@ import {IChainOptions} from "./options.js"; import {AssembledBlockType, BlockAttributes, BlockType, ProduceResult} from "./produceBlock/produceBlockBody.js"; import {IStateRegenerator, RegenCaller} from "./regen/index.js"; import {ReprocessController} from "./reprocess.js"; -import {AttestationsRewards} from "./rewards/attestationsRewards.js"; -import {BlockRewards} from "./rewards/blockRewards.js"; -import {SyncCommitteeRewards} from "./rewards/syncCommitteeRewards.js"; import { SeenAggregators, SeenAttesters, @@ -255,15 +253,15 @@ export interface IBeaconChain { regenCanAcceptWork(): boolean; blsThreadPoolCanAcceptWork(): boolean; - getBlockRewards(blockRef: BeaconBlock | BlindedBeaconBlock): Promise; + getBlockRewards(blockRef: BeaconBlock | BlindedBeaconBlock): Promise; getAttestationsRewards( epoch: Epoch, validatorIds?: (ValidatorIndex | string)[] - ): Promise<{rewards: AttestationsRewards; executionOptimistic: boolean; finalized: boolean}>; + ): Promise<{rewards: rewards.AttestationsRewards; executionOptimistic: boolean; finalized: boolean}>; getSyncCommitteeRewards( blockRef: BeaconBlock | BlindedBeaconBlock, validatorIds?: (ValidatorIndex | string)[] - ): Promise; + ): Promise; } export type SSZObjectType = diff --git a/packages/state-transition/src/index.ts b/packages/state-transition/src/index.ts index d06cfe6767..67ae559fe1 100644 --- a/packages/state-transition/src/index.ts +++ b/packages/state-transition/src/index.ts @@ -40,6 +40,7 @@ export { export * from "./constants/index.js"; export type {EpochTransitionStep} from "./epoch/index.js"; export {type BeaconStateTransitionMetrics, getMetrics} from "./metrics.js"; +export * from "./rewards/index.js"; export * from "./signatureSets/index.js"; export * from "./stateTransition.js"; export type { diff --git a/packages/beacon-node/src/chain/rewards/attestationsRewards.ts b/packages/state-transition/src/rewards/attestationsRewards.ts similarity index 91% rename from packages/beacon-node/src/chain/rewards/attestationsRewards.ts rename to packages/state-transition/src/rewards/attestationsRewards.ts index 7e53b4d943..b3fea34633 100644 --- a/packages/beacon-node/src/chain/rewards/attestationsRewards.ts +++ b/packages/state-transition/src/rewards/attestationsRewards.ts @@ -1,5 +1,4 @@ import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; -import {routes} from "@lodestar/api"; import {BeaconConfig} 
from "@lodestar/config"; import { EFFECTIVE_BALANCE_INCREMENT, @@ -14,24 +13,19 @@ import { WEIGHT_DENOMINATOR, isForkPostElectra, } from "@lodestar/params"; +import {ValidatorIndex, rewards} from "@lodestar/types"; +import {fromHex} from "@lodestar/utils"; +import {EpochTransitionCache, beforeProcessEpoch} from "../cache/epochTransitionCache.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateAltair} from "../types.js"; import { - CachedBeaconStateAllForks, - CachedBeaconStateAltair, - EpochTransitionCache, FLAG_ELIGIBLE_ATTESTER, FLAG_PREV_HEAD_ATTESTER_UNSLASHED, FLAG_PREV_SOURCE_ATTESTER_UNSLASHED, FLAG_PREV_TARGET_ATTESTER_UNSLASHED, - beforeProcessEpoch, hasMarkers, isInInactivityLeak, -} from "@lodestar/state-transition"; -import {ValidatorIndex} from "@lodestar/types"; -import {fromHex} from "@lodestar/utils"; +} from "../util/index.js"; -export type AttestationsRewards = routes.beacon.AttestationsRewards; -type IdealAttestationsReward = routes.beacon.IdealAttestationsReward; -type TotalAttestationsReward = routes.beacon.TotalAttestationsReward; /** Attestations penalty with respect to effective balance in Gwei */ type AttestationsPenalty = {target: number; source: number; effectiveBalance: number}; @@ -43,7 +37,7 @@ export async function computeAttestationsRewards( pubkey2index: PubkeyIndexMap, state: CachedBeaconStateAllForks, validatorIds?: (ValidatorIndex | string)[] -): Promise { +): Promise { const fork = config.getForkName(state.slot); if (fork === ForkName.phase0) { throw Error("Unsupported fork. Attestations rewards calculation is not available in phase0"); @@ -74,7 +68,7 @@ function computeIdealAttestationsRewardsAndPenaltiesAltair( config: BeaconConfig, state: CachedBeaconStateAllForks, transitionCache: EpochTransitionCache -): [IdealAttestationsReward[], AttestationsPenalty[]] { +): [rewards.IdealAttestationsReward[], AttestationsPenalty[]] { const baseRewardPerIncrement = transitionCache.baseRewardPerIncrement; const activeBalanceByIncrement = transitionCache.totalActiveStakeByIncrement; const fork = config.getForkName(state.slot); @@ -98,7 +92,7 @@ function computeIdealAttestationsRewardsAndPenaltiesAltair( const weight = PARTICIPATION_FLAG_WEIGHTS[i]; let unslashedStakeByIncrement: number; - let flagName: keyof IdealAttestationsReward; + let flagName: keyof rewards.IdealAttestationsReward; switch (i) { case TIMELY_SOURCE_FLAG_INDEX: { @@ -151,10 +145,10 @@ function computeTotalAttestationsRewardsAltair( pubkey2index: PubkeyIndexMap, state: CachedBeaconStateAltair, transitionCache: EpochTransitionCache, - idealRewards: IdealAttestationsReward[], + idealRewards: rewards.IdealAttestationsReward[], penalties: AttestationsPenalty[], validatorIds: (ValidatorIndex | string)[] = [] -): TotalAttestationsReward[] { +): rewards.TotalAttestationsReward[] { const rewards = []; const {flags} = transitionCache; const {epochCtx} = state; diff --git a/packages/beacon-node/src/chain/rewards/blockRewards.ts b/packages/state-transition/src/rewards/blockRewards.ts similarity index 93% rename from packages/beacon-node/src/chain/rewards/blockRewards.ts rename to packages/state-transition/src/rewards/blockRewards.ts index 3bdc791042..35e1875950 100644 --- a/packages/beacon-node/src/chain/rewards/blockRewards.ts +++ b/packages/state-transition/src/rewards/blockRewards.ts @@ -1,4 +1,3 @@ -import {routes} from "@lodestar/api"; import {BeaconConfig} from "@lodestar/config"; import { ForkName, @@ -6,16 +5,11 @@ import { WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA, isForkPostElectra, } from 
"@lodestar/params"; -import { - CachedBeaconStateAllForks, - CachedBeaconStateAltair, - CachedBeaconStatePhase0, - getAttesterSlashableIndices, - processAttestationsAltair, -} from "@lodestar/state-transition"; -import {BeaconBlock, altair, phase0} from "@lodestar/types"; +import {BeaconBlock, altair, phase0, rewards} from "@lodestar/types"; +import {processAttestationsAltair} from "../block/processAttestationsAltair.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateAltair, CachedBeaconStatePhase0} from "../cache/stateCache.js"; +import {getAttesterSlashableIndices} from "../util/attestation.js"; -export type BlockRewards = routes.beacon.BlockRewards; type SubRewardValue = number; // All reward values should be integer /** @@ -31,7 +25,7 @@ export async function computeBlockRewards( block: BeaconBlock, preState: CachedBeaconStateAllForks, postState?: CachedBeaconStateAllForks -): Promise { +): Promise { const fork = config.getForkName(block.slot); const {attestations: cachedAttestationsReward = 0, syncAggregate: cachedSyncAggregateReward = 0} = postState?.proposerRewards ?? {}; diff --git a/packages/state-transition/src/rewards/index.ts b/packages/state-transition/src/rewards/index.ts new file mode 100644 index 0000000000..a667a28fe7 --- /dev/null +++ b/packages/state-transition/src/rewards/index.ts @@ -0,0 +1,3 @@ +export * from "./attestationsRewards.js"; +export * from "./blockRewards.js"; +export * from "./syncCommitteeRewards.js"; diff --git a/packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts b/packages/state-transition/src/rewards/syncCommitteeRewards.ts similarity index 86% rename from packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts rename to packages/state-transition/src/rewards/syncCommitteeRewards.ts index caba088177..60b19d82ea 100644 --- a/packages/beacon-node/src/chain/rewards/syncCommitteeRewards.ts +++ b/packages/state-transition/src/rewards/syncCommitteeRewards.ts @@ -1,10 +1,9 @@ -import {routes} from "@lodestar/api"; import {BeaconConfig} from "@lodestar/config"; import {ForkName, SYNC_COMMITTEE_SIZE} from "@lodestar/params"; -import {CachedBeaconStateAllForks, CachedBeaconStateAltair, Index2PubkeyCache} from "@lodestar/state-transition"; -import {BeaconBlock, ValidatorIndex, altair} from "@lodestar/types"; +import {BeaconBlock, ValidatorIndex, altair, rewards} from "@lodestar/types"; +import {Index2PubkeyCache} from "../cache/pubkeyCache.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateAltair} from "../cache/stateCache.js"; -export type SyncCommitteeRewards = routes.beacon.SyncCommitteeRewards; type BalanceRecord = {val: number}; // Use val for convenient way to increment/decrement balance export async function computeSyncCommitteeRewards( @@ -13,7 +12,7 @@ export async function computeSyncCommitteeRewards( block: BeaconBlock, preState: CachedBeaconStateAllForks, validatorIds: (ValidatorIndex | string)[] = [] -): Promise { +): Promise { const fork = config.getForkName(block.slot); if (fork === ForkName.phase0) { throw Error("Cannot get sync rewards as phase0 block does not have sync committee"); diff --git a/packages/beacon-node/test/unit/chain/rewards/blockRewards.test.ts b/packages/state-transition/test/unit/rewards/blockRewards.test.ts similarity index 92% rename from packages/beacon-node/test/unit/chain/rewards/blockRewards.test.ts rename to packages/state-transition/test/unit/rewards/blockRewards.test.ts index 67502b2f00..522e494c51 100644 --- a/packages/beacon-node/test/unit/chain/rewards/blockRewards.test.ts +++ 
b/packages/state-transition/test/unit/rewards/blockRewards.test.ts @@ -2,19 +2,13 @@ import {describe, expect, it, vi} from "vitest"; import {createBeaconConfig} from "@lodestar/config"; import {chainConfig as chainConfigDef} from "@lodestar/config/default"; import {SYNC_COMMITTEE_SIZE} from "@lodestar/params"; -import { - CachedBeaconStateAllForks, - DataAvailabilityStatus, - ExecutionPayloadStatus, - stateTransition, -} from "@lodestar/state-transition"; import {ssz} from "@lodestar/types"; -import {BlockAltairOpts, getBlockAltair} from "../../../../../state-transition/test/perf/block/util.js"; -import { - cachedStateAltairPopulateCaches, - generatePerfTestCachedStateAltair, -} from "../../../../../state-transition/test/perf/util.js"; -import {computeBlockRewards} from "../../../../src/chain/rewards/blockRewards.js"; +import {DataAvailabilityStatus, ExecutionPayloadStatus} from "../../../src/block/externalData.js"; +import {computeBlockRewards} from "../../../src/rewards/blockRewards.js"; +import {stateTransition} from "../../../src/stateTransition.js"; +import {CachedBeaconStateAllForks} from "../../../src/types.js"; +import {BlockAltairOpts, getBlockAltair} from "../../perf/block/util.js"; +import {cachedStateAltairPopulateCaches, generatePerfTestCachedStateAltair} from "../../perf/util.js"; describe("chain / rewards / blockRewards", () => { const config = createBeaconConfig({...chainConfigDef, ALTAIR_FORK_EPOCH: 0}, Buffer.alloc(32, 0xaa)); diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index 3f907e3c26..959fb13807 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -5,9 +5,11 @@ import * as ssz from "./sszTypes.js"; import {sszTypesFor} from "./sszTypes.js"; export {sszTypesFor, SSZTypesFor, ssz}; +export * from "./utils/array.js"; // Container utils export * from "./utils/container.js"; export {ExecutionAddressType} from "./utils/executionAddress.js"; +export * as rewards from "./utils/rewards.js"; // String type export {StringType, stringType} from "./utils/stringType.js"; // Typeguards diff --git a/packages/types/src/utils/array.ts b/packages/types/src/utils/array.ts new file mode 100644 index 0000000000..0fb3680138 --- /dev/null +++ b/packages/types/src/utils/array.ts @@ -0,0 +1,11 @@ +import {ArrayType, ListBasicType, ListCompositeType, Type, isBasicType, isCompositeType} from "@chainsafe/ssz"; + +export function ArrayOf(elementType: Type, limit = Infinity): ArrayType, unknown, unknown> { + if (isCompositeType(elementType)) { + return new ListCompositeType(elementType, limit) as unknown as ArrayType, unknown, unknown>; + } + if (isBasicType(elementType)) { + return new ListBasicType(elementType, limit) as unknown as ArrayType, unknown, unknown>; + } + throw Error(`Unknown type ${elementType.typeName}`); +} diff --git a/packages/types/src/utils/rewards.ts b/packages/types/src/utils/rewards.ts new file mode 100644 index 0000000000..32bc5995c6 --- /dev/null +++ b/packages/types/src/utils/rewards.ts @@ -0,0 +1,102 @@ +import {ContainerType, ValueOf} from "@chainsafe/ssz"; +import {UintNum64, ValidatorIndex} from "../sszTypes.js"; +import {ArrayOf} from "./array.js"; + +export const BlockRewardsType = new ContainerType( + { + /** Proposer of the block, the proposer index who receives these rewards */ + proposerIndex: ValidatorIndex, + /** Total block reward, equal to attestations + sync_aggregate + proposer_slashings + attester_slashings */ + total: UintNum64, + /** Block reward component due to included attestations */ + 
attestations: UintNum64, + /** Block reward component due to included sync_aggregate */ + syncAggregate: UintNum64, + /** Block reward component due to included proposer_slashings */ + proposerSlashings: UintNum64, + /** Block reward component due to included attester_slashings */ + attesterSlashings: UintNum64, + }, + {jsonCase: "eth2"} +); + +export const AttestationsRewardType = new ContainerType( + { + /** Reward for head vote. Could be negative to indicate penalty */ + head: UintNum64, + /** Reward for target vote. Could be negative to indicate penalty */ + target: UintNum64, + /** Reward for source vote. Could be negative to indicate penalty */ + source: UintNum64, + /** Inclusion delay reward (phase0 only) */ + inclusionDelay: UintNum64, + /** Inactivity penalty. Should be a negative number to indicate penalty */ + inactivity: UintNum64, + }, + {jsonCase: "eth2"} +); + +export const IdealAttestationsRewardsType = new ContainerType( + { + ...AttestationsRewardType.fields, + effectiveBalance: UintNum64, + }, + {jsonCase: "eth2"} +); + +export const TotalAttestationsRewardsType = new ContainerType( + { + ...AttestationsRewardType.fields, + validatorIndex: ValidatorIndex, + }, + {jsonCase: "eth2"} +); + +export const AttestationsRewardsType = new ContainerType( + { + idealRewards: ArrayOf(IdealAttestationsRewardsType), + totalRewards: ArrayOf(TotalAttestationsRewardsType), + }, + {jsonCase: "eth2"} +); + +export const SyncCommitteeRewardsType = ArrayOf( + new ContainerType( + { + validatorIndex: ValidatorIndex, + reward: UintNum64, + }, + {jsonCase: "eth2"} + ) +); + +/** + * Rewards info for a single block. Every reward value is in Gwei. + */ +export type BlockRewards = ValueOf; + +/** + * Rewards for a single set of (ideal or actual depending on usage) attestations. Reward value is in Gwei + */ +export type AttestationsReward = ValueOf; + +/** + * Rewards info for ideal attestations ie. Maximum rewards could be earned by making timely head, target and source vote. + * `effectiveBalance` is in Gwei + */ +export type IdealAttestationsReward = ValueOf; + +/** + * Rewards info for actual attestations + */ +export type TotalAttestationsReward = ValueOf; + +export type AttestationsRewards = ValueOf; + +/** + * Rewards info for sync committee participation. Every reward value is in Gwei. + * Note: In the case that block proposer is present in `SyncCommitteeRewards`, the reward value only reflects rewards for + * participating in sync committee. Please refer to `BlockRewards.syncAggregate` for rewards of proposer including sync committee + * outputs into their block + */ +export type SyncCommitteeRewards = ValueOf;