Mirror of https://github.com/ChainSafe/lodestar.git (synced 2026-01-09 15:48:08 -05:00)

Commit: chore: v1.38.0 release (#8687)
@@ -3,7 +3,7 @@
 {
   "name": "Node.js & TypeScript",
   // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
-  "image": "mcr.microsoft.com/devcontainers/typescript-node:1-22-bullseye",
+  "image": "mcr.microsoft.com/devcontainers/typescript-node:1-24-bullseye",
   "features": {
     "ghcr.io/devcontainers/features/python:1": {}
   }
.github/workflows/benchmark.yml (2 changes, vendored)
@@ -33,7 +33,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           check-latest: true
           cache: yarn
       - name: Node.js version
.github/workflows/binaries.yml (2 changes, vendored)
@@ -42,7 +42,7 @@ jobs:
          sudo apt-get install -y build-essential python3
      - uses: "./.github/actions/setup-and-build"
        with:
-         node: 22
+         node: 24
      - run: |
          mkdir -p dist
          yarn global add @chainsafe/caxa@3.0.6
.github/workflows/build-debug-node.yml (32 changes, vendored)
@@ -1,11 +1,11 @@
 name: Build debug node

 on:
   workflow_dispatch:
     inputs:
       version:
         required: true
-        description: 'Node.js version'
+        description: "Node.js version"

 jobs:
   build:
@@ -14,34 +14,34 @@ jobs:
     strategy:
       fail-fast: false
     steps:
      - name: Install dependencies
        run: apt-get install python3 g++ make python3-pip

      - name: Download Node.js source
        uses: actions/checkout@v4
        with:
-         repository: 'nodejs/node'
-         ref: 'v${{ github.event.inputs.version }}'
-         path: 'nodejs'
+         repository: "nodejs/node"
+         ref: "v${{ github.event.inputs.version }}"
+         path: "nodejs"

      - name: Configure nodejs with debug flag
        run: ./configure --debug
-       working-directory: 'nodejs'
+       working-directory: "nodejs"

      - name: Compile the nodejs
        run: make -j$(nproc --all)
-       working-directory: 'nodejs'
+       working-directory: "nodejs"

      - name: Verify the build
        run: make test-only
-       working-directory: 'nodejs'
+       working-directory: "nodejs"

      - name: Create destination folder
        run: mkdir -p ${{ github.workspace }}/nodejs-debug-build-${{ github.event.inputs.version }}

      - name: Copy nodejs debug build
        run: cp out/Debug/node ${{ github.workspace }}/nodejs-debug-build-${{ github.event.inputs.version }}
-       working-directory: 'nodejs'
+       working-directory: "nodejs"

      - name: Upload build to artifacts
        uses: actions/upload-artifact@v4
.github/workflows/docs-check.yml (2 changes, vendored)
@@ -15,7 +15,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           cache: yarn
       - name: Node.js version
         id: node
.github/workflows/docs.yml (2 changes, vendored)
@@ -31,7 +31,7 @@ jobs:

       - uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           check-latest: true
           cache: yarn
.github/workflows/kurtosis.yml (2 changes, vendored)
@@ -24,4 +24,4 @@ jobs:
       - name: Run test
         uses: ethpandaops/kurtosis-assertoor-github-action@v1
         with:
-          ethereum_package_args: '.github/workflows/assets/kurtosis_sim_test_config.yaml'
+          ethereum_package_args: ".github/workflows/assets/kurtosis_sim_test_config.yaml"
.github/workflows/publish-dev.yml (22 changes, vendored)
@@ -5,22 +5,26 @@ concurrency:
   group: cd-publish-dev

 on:
   push:
     branches:
       - unstable
   workflow_call:

 permissions:
   contents: write # Required for OIDC
   id-token: write # Required to create a Github release
   pull-requests: write # Required to add tags to pull requests

 jobs:
   npm:
     name: Publish to NPM Registry
-    runs-on: buildjet-4vcpu-ubuntu-2204
+    # To support npm provenance we must use github-hosted runners
+    runs-on: ubuntu-latest
     steps:
       # <common-build> - Uses YAML anchors in the future
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - uses: actions/setup-node@v4
        with:
-         node-version: 22
+         node-version: 24
          registry-url: "https://registry.npmjs.org"
          check-latest: true
          cache: yarn
@@ -57,7 +61,7 @@ jobs:
        # This "temp" commit doesn't change the actually release commit which is captured above.
        # git-data is also correct, since it's generated at build time, before `lerna version` run.
        run: |
-         node_modules/.bin/lerna version ${{ steps.version.outputs.version }} \
+         yarn lerna version ${{ steps.version.outputs.version }} \
            --force-publish \
            --exact \
            --yes \
@@ -92,15 +96,13 @@ jobs:
        #
        # NOTE: Using --preid dev.$(git rev-parse --short=7 HEAD) results in `0.24.3-dev.3ddb91d.0+3ddb91d`
        run: |
-         node_modules/.bin/lerna publish from-package \
+         yarn lerna publish from-package \
            --yes \
            --no-verify-access \
            --dist-tag next \
            --no-git-reset \
            --force-publish \
            --exact
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
    outputs:
      version: ${{ steps.version.outputs.version }}
.github/workflows/publish-nextfork.yml (22 changes, vendored)
@@ -5,9 +5,12 @@ concurrency:
   group: cd-publish-nextfork

 on:
   push:
     branches:
       - peerDAS # Nextfork branch
   workflow_call:

 permissions:
   contents: write # Required for OIDC
   id-token: write # Required to create a Github release
   pull-requests: write # Required to add tags to pull requests

 env:
   NEXT_FORK: peerDAS
@@ -15,15 +18,16 @@ env:
 jobs:
   npm:
     name: Publish to NPM Registry
-    runs-on: buildjet-4vcpu-ubuntu-2204
+    # To support npm provenance we must use github-hosted runners
+    runs-on: ubuntu-latest
     steps:
       # <common-build> - Uses YAML anchors in the future
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - uses: actions/setup-node@v4
        with:
-         node-version: 22
+         node-version: 24
          registry-url: "https://registry.npmjs.org"
          check-latest: true
          cache: yarn
@@ -60,7 +64,7 @@ jobs:
        # This "temp" commit doesn't change the actually release commit which is captured above.
        # git-data is also correct, since it's generated at build time, before `lerna version` run.
        run: |
-         node_modules/.bin/lerna version ${{ steps.version.outputs.version }} \
+         yarn lerna version ${{ steps.version.outputs.version }} \
            --force-publish \
            --exact \
            --yes \
@@ -95,15 +99,13 @@ jobs:
        #
        # NOTE: Using --preid dev.$(git rev-parse --short=7 HEAD) results in `0.24.3-dev.3ddb91d.0+3ddb91d`
        run: |
-         node_modules/.bin/lerna publish from-package \
+         yarn lerna publish from-package \
            --yes \
            --no-verify-access \
            --dist-tag next \
            --no-git-reset \
            --force-publish \
            --exact
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
    outputs:
      version: ${{ steps.version.outputs.version }}
.github/workflows/publish-rc.yml (29 changes, vendored)
@@ -5,14 +5,18 @@ concurrency: cd-publish-rc

 # See for rationale https://github.com/ChainSafe/lodestar/blob/unstable/RELEASE.md
 on:
   push:
     tags:
       - v*
   workflow_call:

 permissions:
   contents: write # Required for OIDC
   id-token: write # Required to create a Github release
   pull-requests: write # Required to add tags to pull requests

 jobs:
   tag:
     name: Check tag
-    runs-on: buildjet-4vcpu-ubuntu-2204
+    # To support npm provenance we must use github-hosted runners
+    runs-on: ubuntu-latest
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
@@ -51,23 +55,23 @@ jobs:

   npm:
     name: Publish to NPM & Github
-    runs-on: buildjet-4vcpu-ubuntu-2204
+    runs-on: ubuntu-latest
     needs: [tag, binaries]
     if: needs.tag.outputs.is_rc == 'true'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
        with:
          fetch-depth: 0 # Needs full depth for changelog generation

      - uses: "./.github/actions/setup-and-build"
        with:
-         node: 22
+         node: 24

      - name: Generate changelog
        run: node scripts/generate_changelog.mjs ${{ needs.tag.outputs.prev_tag }} ${{ needs.tag.outputs.tag }} CHANGELOG.md

      - name: Get binaries
-       uses: actions/download-artifact@v4
+       uses: actions/download-artifact@v5
        with:
          path: dist/
          merge-multiple: true
@@ -91,7 +95,7 @@ jobs:
        # This "temp" commit doesn't change the actually release commit which is captured above.
        # git-data is also correct, since it's generated at build time, before `lerna version` run.
        run: |
-         node_modules/.bin/lerna version ${{ needs.tag.outputs.version }} \
+         yarn lerna version ${{ needs.tag.outputs.version }} \
            --force-publish \
            --exact \
            --yes \
@@ -101,15 +105,8 @@ jobs:
          git config user.email 'temp@github.com'
          git commit -am "${{ needs.tag.outputs.version }}"

-      # From https://github.com/lerna/lerna/issues/2404
-      - run: echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} > .npmrc
-        env:
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
-
      - name: Publish to npm registry
        run: yarn run release:publish --dist-tag rc
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

      # In case of failure
      - name: Rollback on failure
.github/workflows/publish-stable.yml (27 changes, vendored)
@@ -5,17 +5,21 @@ concurrency: cd-publish-stable

 # See for rationale https://github.com/ChainSafe/lodestar/blob/unstable/RELEASE.md
 on:
   push:
     tags:
       - v*
   workflow_call:

 permissions:
   contents: write # Required for OIDC
   id-token: write # Required to create a Github release
   pull-requests: write # Required to add tags to pull requests

 jobs:
   tag:
     name: Check tag
-    runs-on: buildjet-4vcpu-ubuntu-2204
+    # To support npm provenance we must use github-hosted runners
+    runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
        with:
          fetch-depth: 0

@@ -57,7 +61,7 @@ jobs:

  npm:
    name: Publish to NPM & Github
-   runs-on: buildjet-4vcpu-ubuntu-2204
+   runs-on: ubuntu-latest
    needs: [tag, binaries]
    if: needs.tag.outputs.is_stable == 'true'
    steps:
@@ -67,7 +71,7 @@ jobs:

      - uses: "./.github/actions/setup-and-build"
        with:
-         node: 22
+         node: 24

      - name: Generate changelog
        run: node scripts/generate_changelog.mjs ${{ needs.tag.outputs.prev_tag }} ${{ needs.tag.outputs.tag }} CHANGELOG.md
@@ -91,15 +95,8 @@ jobs:
          name: Release ${{ needs.tag.outputs.tag }}
          prerelease: false

-      # From https://github.com/lerna/lerna/issues/2404
-      - run: echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} > .npmrc
-        env:
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
-
      - name: Publish to npm registry (release)
        run: yarn run release:publish
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

      # In case of failure
      - name: Rollback on failure
@@ -114,7 +111,7 @@ jobs:

  comment:
    name: Comment on included PRs
-   runs-on: buildjet-4vcpu-ubuntu-2204
+   runs-on: ubuntu-latest
    needs: [tag, npm]
    if: needs.tag.outputs.is_stable == 'true'
    steps:
.github/workflows/publish.yml (new file, 48 lines, vendored)
@@ -0,0 +1,48 @@
+name: Publish
+
+# only one can run at a time
+concurrency: cd-publish
+
+on:
+  push:
+    tags:
+      - v*
+    branches:
+      # - peerDAS # Nextfork branch
+      - unstable
+
+permissions:
+  contents: write # Required for OIDC
+  id-token: write # Required to create a Github release
+  pull-requests: write # Required to add tags to pull requests
+
+jobs:
+  publish-stable:
+    if: |
+      startsWith(github.ref, 'refs/tags/v') && (
+        !contains(github.ref_name, '-rc') &&
+        !contains(github.ref_name, '-beta') &&
+        !contains(github.ref_name, '-alpha')
+      )
+    uses: ./.github/workflows/publish-stable.yml
+    secrets: inherit
+
+  publish-rc:
+    if: |
+      startsWith(github.ref, 'refs/tags/v') && (
+        contains(github.ref_name, '-rc') ||
+        contains(github.ref_name, '-beta') ||
+        contains(github.ref_name, '-alpha')
+      )
+    uses: ./.github/workflows/publish-rc.yml
+    secrets: inherit
+
+  # publish-nextfork:
+  #   if: github.ref == 'refs/heads/peerDAS'
+  #   uses: ./.github/workflows/publish-next-fork.yml
+  #   secrets: inherit
+
+  publish-dev:
+    if: github.ref == 'refs/heads/unstable'
+    uses: ./.github/workflows/publish-dev.yml
+    secrets: inherit
.github/workflows/test-bun.yml (2 changes, vendored)
@@ -21,7 +21,7 @@ jobs:
       - name: Setup Node
         uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           cache: yarn
       - uses: oven-sh/setup-bun@v2
         with:
.github/workflows/test-sim-merge.yml (2 changes, vendored)
@@ -29,7 +29,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: "./.github/actions/setup-and-build"
         with:
-          node: 22
+          node: 24

       - name: Pull Geth
         run: docker pull $GETH_IMAGE
.github/workflows/test-sim.yml (12 changes, vendored)
@@ -31,7 +31,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: "./.github/actions/setup-and-build"
         with:
-          node: 22
+          node: 24

   sim-test-multifork:
     name: Multifork sim test
@@ -42,7 +42,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: "./.github/actions/setup-and-build"
         with:
-          node: 22
+          node: 24
       - name: Load env variables
         uses: ./.github/actions/dotenv
       - name: Download required docker images before running tests
@@ -71,7 +71,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: "./.github/actions/setup-and-build"
         with:
-          node: 22
+          node: 24
       - name: Load env variables
         uses: ./.github/actions/dotenv
       - name: Download required docker images before running tests
@@ -100,7 +100,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: "./.github/actions/setup-and-build"
         with:
-          node: 22
+          node: 24
       - name: Load env variables
         uses: ./.github/actions/dotenv
       - name: Download required docker images before running tests
@@ -129,7 +129,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: "./.github/actions/setup-and-build"
         with:
-          node: 22
+          node: 24
       - name: Load env variables
         uses: ./.github/actions/dotenv
       - name: Download required docker images before running tests
@@ -158,7 +158,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: "./.github/actions/setup-and-build"
         with:
-          node: 22
+          node: 24
       - name: Load env variables
         uses: ./.github/actions/dotenv
       - name: Download required docker images before running tests
.github/workflows/test.yml (14 changes, vendored)
@@ -19,7 +19,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        node: [22]
+        node: [24]
     steps:
       # <common-build> - Uses YAML anchors in the future
       - uses: actions/checkout@v4
@@ -42,7 +42,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        node: [22]
+        node: [24]
     steps:
       - uses: actions/checkout@v4
       - uses: "./.github/actions/setup-and-build"
@@ -71,7 +71,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        node: [22]
+        node: [24]
     steps:
       - uses: actions/checkout@v4
@@ -92,7 +92,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        node: [22]
+        node: [24]
     steps:
       - uses: actions/checkout@v4
       - uses: "./.github/actions/setup-and-build"
@@ -131,7 +131,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        node: [22]
+        node: [24]
     steps:
       # <common-build> - Uses YAML anchors in the future
       - uses: actions/checkout@v4
@@ -168,7 +168,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        node: [22]
+        node: [24]
     steps:
       # <common-build> - Uses YAML anchors in the future
       - uses: actions/checkout@v4
@@ -190,7 +190,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        node: [22]
+        node: [24]
     steps:
       - uses: actions/checkout@v4
       - uses: "./.github/actions/setup-and-build"
.vscode/launch.template.json (5 changes, vendored)
@@ -50,7 +50,10 @@
         "${input:testName}",
         "--pool",
         "forks",
-        "--poolOptions.forks.singleFork"
+        "--maxWorkers",
+        "1",
+        "--isolate",
+        "true"
       ],
       "cwd": "${workspaceFolder}/${input:packageName}",
       "console": "integratedTerminal",
@@ -1,7 +1,7 @@
 # --platform=$BUILDPLATFORM is used build javascript source with host arch
 # Otherwise TS builds on emulated archs and can be extremely slow (+1h)
-FROM --platform=${BUILDPLATFORM:-amd64} node:22-slim AS build_src
+FROM --platform=${BUILDPLATFORM:-amd64} node:24-slim AS build_src
 ARG COMMIT
 WORKDIR /usr/app
 RUN apt-get update && apt-get install -y git g++ make python3 python3-setuptools && apt-get clean && rm -rf /var/lib/apt/lists/*
@@ -21,7 +21,7 @@ RUN cd packages/cli && GIT_COMMIT=${COMMIT} yarn write-git-data

 # Copy built src + node_modules to build native packages for archs different than host.
 # Note: This step is redundant for the host arch
-FROM node:22-slim AS build_deps
+FROM node:24-slim AS build_deps
 WORKDIR /usr/app
 RUN apt-get update && apt-get install -y git g++ make python3 python3-setuptools && apt-get clean && rm -rf /var/lib/apt/lists/*
@@ -35,7 +35,7 @@ RUN cd node_modules/classic-level && yarn rebuild

 # Copy built src + node_modules to a new layer to prune unnecessary fs
 # Previous layer weights 7.25GB, while this final 488MB (as of Oct 2020)
-FROM node:22-slim
+FROM node:24-slim
 WORKDIR /usr/app
 COPY --from=build_deps /usr/app .
@@ -3,7 +3,7 @@
 # Run this on local host only.
 # Generated image is larger than production image. Do not use this for production.

-FROM --platform=${BUILDPLATFORM:-amd64} node:22 AS build_dev
+FROM --platform=${BUILDPLATFORM:-amd64} node:24 AS build_dev
 ARG COMMIT

 WORKDIR /usr/app
@@ -6,7 +6,7 @@
 [](https://hub.docker.com/r/chainsafe/lodestar)
 [](https://github.com/ethereum/consensus-specs/releases/tag/v1.5.0)
-
+
 
 [](https://codecov.io/gh/ChainSafe/lodestar)
 [](https://www.gitpoap.io/gh/ChainSafe/lodestar)
@@ -51,6 +51,7 @@ yarn build
 | [`@lodestar/beacon-node`](./packages/beacon-node) | [](https://www.npmjs.com/package/@lodestar/beacon-node) | [](https://opensource.org/licenses/Apache-2.0) | [](./packages/beacon-node) | :rotating_light: Beacon-chain client |
 | [`@lodestar/config`](./packages/config) | [](https://www.npmjs.com/package/@lodestar/config) | [](https://opensource.org/licenses/Apache-2.0) | [](./packages/config) | :spiral_notepad: Eth Consensus types and params bundled together |
 | [`@lodestar/db`](./packages/db) | [](https://www.npmjs.com/package/@lodestar/db) | [](https://opensource.org/licenses/Apache-2.0) | [](./packages/db) | :floppy_disk: Read/write persistent Eth Consensus data |
+| [`@lodestar/era`](./packages/era) | [](https://www.npmjs.com/package/@lodestar/era) | [](https://opensource.org/licenses/Apache-2.0) | [](./packages/era) | :file_folder: Era file handling for historical beacon chain data |
 | [`@lodestar/flare`](./packages/flare) | [](https://www.npmjs.com/package/@lodestar/flare) | [](https://opensource.org/licenses/Apache-2.0) | [](./packages/flare) | :boom: Command tool for triggering non-standard actions |
 | [`@lodestar/fork-choice`](./packages/fork-choice) | [](https://www.npmjs.com/package/@lodestar/fork-choice) | [](https://opensource.org/licenses/Apache-2.0) | [](./packages/fork-choice) | :fork_and_knife: Beacon-chain fork choice |
 | [`@lodestar/light-client`](./packages/light-client) | [](https://www.npmjs.com/package/@lodestar/light-client) | [](https://opensource.org/licenses/Apache-2.0) | [](./packages/light-client) | :bird: Ethereum Light client |
@@ -517,16 +517,73 @@
           "type": "prometheus",
           "uid": "${DS_PROMETHEUS}"
         },
         "editorMode": "code",
         "exemplar": false,
         "expr": "validator_monitor_validators",
         "instant": true,
         "interval": "",
         "legendFormat": "",
         "range": false,
         "refId": "A"
       }
     ],
     "title": "Validators connected",
     "type": "stat"
   },
+  {
+    "datasource": {
+      "type": "prometheus",
+      "uid": "${DS_PROMETHEUS}"
+    },
+    "fieldConfig": {
+      "defaults": {
+        "mappings": [],
+        "unit": "short"
+      },
+      "overrides": []
+    },
+    "gridPos": {
+      "h": 3,
+      "w": 2,
+      "x": 20,
+      "y": 1
+    },
+    "id": 543,
+    "options": {
+      "colorMode": "value",
+      "graphMode": "area",
+      "justifyMode": "auto",
+      "orientation": "auto",
+      "reduceOptions": {
+        "calcs": [
+          "lastNotNull"
+        ],
+        "fields": "",
+        "values": false
+      },
+      "showPercentChange": false,
+      "textMode": "auto",
+      "wideLayout": true
+    },
+    "pluginVersion": "10.4.1",
+    "targets": [
+      {
+        "datasource": {
+          "type": "prometheus",
+          "uid": "${DS_PROMETHEUS}"
+        },
+        "editorMode": "code",
+        "exemplar": false,
+        "expr": "beacon_target_custody_group_count",
+        "instant": true,
+        "legendFormat": "__auto",
+        "range": false,
+        "refId": "A"
+      }
+    ],
+    "title": "Custody Group Count",
+    "type": "stat"
+  },
   {
     "datasource": {
       "type": "prometheus",
@@ -543,8 +600,8 @@
     },
     "gridPos": {
       "h": 3,
-      "w": 2,
-      "x": 20,
+      "w": 1,
+      "x": 22,
       "y": 1
     },
     "id": 72,
@@ -579,7 +636,7 @@
         "refId": "A"
       }
     ],
-    "title": "CPU %",
+    "title": "CPU",
     "type": "stat"
   },
   {
@@ -596,8 +653,8 @@
     },
     "gridPos": {
       "h": 3,
-      "w": 2,
-      "x": 22,
+      "w": 1,
+      "x": 23,
       "y": 1
     },
     "id": 73,
@@ -1,5 +1,5 @@
 # To specify a specific network (defaults to mainnet) set this value.
-# Allowed values are: mainnet, gnosis, holesky, sepolia, hoodi and chiado. Source for currently supported networks: https://github.com/ChainSafe/lodestar/blob/unstable/packages/cli/src/networks/index.ts#L21
+# Allowed values are: mainnet, gnosis, sepolia, hoodi, chiado, ephemery, and dev. Source for currently supported networks: https://github.com/ChainSafe/lodestar/blob/unstable/packages/cli/src/networks/index.ts#L26
 LODESTAR_NETWORK=mainnet

 # Set a custom admin password to prevent having to signup.
@@ -42,7 +42,7 @@ Docker is the recommended setup for Lodestar. Use our [Lodestar Quickstart scrip

 ### Prerequisites

-Make sure to have [Yarn installed](https://classic.yarnpkg.com/en/docs/install). It is also recommended to [install NVM (Node Version Manager)](https://github.com/nvm-sh/nvm) and use the LTS version (currently v22) of [NodeJS](https://nodejs.org/en/).
+Make sure to have [Yarn installed](https://classic.yarnpkg.com/en/docs/install). It is also recommended to [install NVM (Node Version Manager)](https://github.com/nvm-sh/nvm) and use the LTS version (currently v24) of [NodeJS](https://nodejs.org/en/).

 :::info
 NodeJS versions older than the current LTS are not supported by Lodestar. We recommend running the latest Node LTS.
@@ -7,7 +7,7 @@ This guide will provide instructions which include running a local execution nod
 :::info
 This guide specifically focuses on using Lodestar's Quickstart scripts which allows for near instant setup with the following technologies:

-- [Ubuntu v22.04 (LTS) x64 server](https://releases.ubuntu.com/22.04/)
+- [Ubuntu v24.04 (LTS) x64 server](https://releases.ubuntu.com/24.04/)
 - Ethereum Execution (eth1) clients:
   - [Erigon](https://github.com/ledgerwatch/erigon/releases) | [Github](https://github.com/ledgerwatch/erigon)
   - [Go-Ethereum (Geth)](https://geth.ethereum.org/) | [Github](https://github.com/ethereum/go-ethereum/releases/)
@@ -39,7 +39,7 @@ This guide assumes knowledge of Ethereum (ETH), Docker, staking and Linux.

 You require the following before getting started:

-- [Ubuntu Server v22.04 (LTS) amd64](https://releases.ubuntu.com/22.04/) or newer, installed and running on a local machine or in the cloud. _A locally running machine is encouraged for greater decentralization — if the cloud provider goes down then all nodes hosted with that provider go down._
+- [Ubuntu Server v24.04 (LTS) amd64](https://releases.ubuntu.com/24.04/) or newer, installed and running on a local machine or in the cloud. _A locally running machine is encouraged for greater decentralization — if the cloud provider goes down then all nodes hosted with that provider go down._

 - 32 ETH to run a solo validator with Lodestar. If running on testnet, contact us in our [ChainSafe Discord Server](https://discord.gg/642wB3XC3Q) for testnet Ether.
@@ -6289,9 +6289,9 @@ mdast-util-phrasing@^4.0.0:
     unist-util-is "^6.0.0"

 mdast-util-to-hast@^13.0.0:
-  version "13.1.0"
-  resolved "https://registry.yarnpkg.com/mdast-util-to-hast/-/mdast-util-to-hast-13.1.0.tgz#1ae54d903150a10fe04d59f03b2b95fd210b2124"
-  integrity sha512-/e2l/6+OdGp/FB+ctrJ9Avz71AN/GRH3oi/3KAx/kMnoUsD6q0woXlDT8lLEeViVKE7oZxE7RXzvO3T8kF2/sA==
+  version "13.2.1"
+  resolved "https://registry.yarnpkg.com/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz#d7ff84ca499a57e2c060ae67548ad950e689a053"
+  integrity sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==
   dependencies:
     "@types/hast" "^3.0.0"
     "@types/mdast" "^4.0.0"
@@ -3,13 +3,13 @@
     "packages/*"
   ],
   "npmClient": "yarn",
-  "useNx": true,
-  "version": "1.37.0",
+  "version": "1.38.0",
   "stream": true,
   "command": {
     "version": {
       "message": "chore(release): %s"
     }
   },
-  "$schema": "node_modules/lerna/schemas/lerna-schema.json"
+  "useWorkspaces": true,
+  "$schema": "node_modules/@lerna-lite/cli/schemas/lerna-schema.json"
 }
package.json (10 changes)
@@ -2,7 +2,7 @@
   "name": "root",
   "private": true,
   "engines": {
-    "node": ">=22 <23"
+    "node": ">=22 <25"
   },
   "packageManager": "yarn@1.22.22+sha256.c17d3797fb9a9115bf375e31bfd30058cac6bc9c3b8807a3d8cb2094794b51ca",
   "workspaces": [
@@ -45,7 +45,11 @@
     "@biomejs/biome": "^2.2.0",
     "@chainsafe/benchmark": "^1.2.3",
     "@chainsafe/biomejs-config": "^1.0.0",
-    "@types/node": "^22.18.6",
+    "@lerna-lite/cli": "^4.9.4",
+    "@lerna-lite/publish": "^4.9.4",
+    "@lerna-lite/run": "^4.9.4",
+    "@lerna-lite/version": "^4.9.4",
+    "@types/node": "^24.10.1",
     "@types/react": "^19.1.12",
     "@vitest/browser": "^4.0.7",
     "@vitest/browser-playwright": "^4.0.7",
@@ -56,7 +60,6 @@
     "electron": "^26.2.2",
     "https-browserify": "^1.0.0",
     "jsdom": "^23.0.1",
-    "lerna": "^7.3.0",
     "libp2p": "2.9.0",
     "node-gyp": "^9.4.0",
     "npm-run-all": "^4.1.5",
@@ -82,7 +85,6 @@
   "resolutions": {
     "dns-over-http-resolver": "^2.1.1",
     "loupe": "^2.3.6",
-    "testcontainers/**/nan": "^2.19.0",
     "elliptic": ">=6.6.1"
   }
 }
@@ -3,7 +3,7 @@
 [](https://discord.gg/aMxzVcr)
 [](https://github.com/ethereum/beacon-APIs/releases/tag/v3.1.0)
-
+
 

 > This package is part of [ChainSafe's Lodestar](https://lodestar.chainsafe.io) project
@@ -11,7 +11,7 @@
   "bugs": {
     "url": "https://github.com/ChainSafe/lodestar/issues"
   },
-  "version": "1.37.0",
+  "version": "1.38.0",
   "type": "module",
   "exports": {
     ".": {
@@ -76,10 +76,10 @@
   "dependencies": {
     "@chainsafe/persistent-merkle-tree": "^1.2.1",
     "@chainsafe/ssz": "^1.2.2",
-    "@lodestar/config": "^1.37.0",
-    "@lodestar/params": "^1.37.0",
-    "@lodestar/types": "^1.37.0",
-    "@lodestar/utils": "^1.37.0",
+    "@lodestar/config": "^1.38.0",
+    "@lodestar/params": "^1.38.0",
+    "@lodestar/types": "^1.38.0",
+    "@lodestar/utils": "^1.38.0",
     "eventsource": "^2.0.2",
     "qs": "^6.11.1"
   },
@@ -3,7 +3,7 @@
 [](https://discord.gg/aMxzVcr)
 [](https://github.com/ethereum/consensus-specs/releases/tag/v1.5.0)
-
+
 

 > This package is part of [ChainSafe's Lodestar](https://lodestar.chainsafe.io) project
@@ -11,7 +11,7 @@
   "bugs": {
     "url": "https://github.com/ChainSafe/lodestar/issues"
   },
-  "version": "1.37.0",
+  "version": "1.38.0",
   "type": "module",
   "exports": {
     ".": {
@@ -123,6 +123,7 @@
     "@chainsafe/persistent-merkle-tree": "^1.2.1",
     "@chainsafe/prometheus-gc-stats": "^1.0.0",
     "@chainsafe/pubkey-index-map": "^3.0.0",
+    "@chainsafe/snappy-wasm": "^0.5.0",
     "@chainsafe/ssz": "^1.2.2",
     "@chainsafe/threads": "^1.11.3",
     "@crate-crypto/node-eth-kzg": "0.9.1",
@@ -140,18 +141,18 @@
     "@libp2p/peer-id": "^5.1.0",
     "@libp2p/prometheus-metrics": "^4.3.15",
     "@libp2p/tcp": "^10.1.8",
-    "@lodestar/api": "^1.37.0",
-    "@lodestar/config": "^1.37.0",
-    "@lodestar/db": "^1.37.0",
-    "@lodestar/fork-choice": "^1.37.0",
-    "@lodestar/light-client": "^1.37.0",
-    "@lodestar/logger": "^1.37.0",
-    "@lodestar/params": "^1.37.0",
-    "@lodestar/reqresp": "^1.37.0",
-    "@lodestar/state-transition": "^1.37.0",
-    "@lodestar/types": "^1.37.0",
-    "@lodestar/utils": "^1.37.0",
-    "@lodestar/validator": "^1.37.0",
+    "@lodestar/api": "^1.38.0",
+    "@lodestar/config": "^1.38.0",
+    "@lodestar/db": "^1.38.0",
+    "@lodestar/fork-choice": "^1.38.0",
+    "@lodestar/light-client": "^1.38.0",
+    "@lodestar/logger": "^1.38.0",
+    "@lodestar/params": "^1.38.0",
+    "@lodestar/reqresp": "^1.38.0",
+    "@lodestar/state-transition": "^1.38.0",
+    "@lodestar/types": "^1.38.0",
+    "@lodestar/utils": "^1.38.0",
+    "@lodestar/validator": "^1.38.0",
     "@multiformats/multiaddr": "^12.1.3",
     "datastore-core": "^10.0.2",
     "datastore-fs": "^10.0.6",
@@ -166,7 +167,6 @@
     "multiformats": "^11.0.1",
     "prom-client": "^15.1.0",
     "qs": "^6.11.1",
-    "snappyjs": "^0.7.0",
     "strict-event-emitter-types": "^2.0.0",
     "systeminformation": "^5.22.9",
     "uint8arraylist": "^2.4.7",
@@ -71,9 +71,11 @@ export async function getStateResponseWithRegen(
     typeof stateId === "string"
       ? await chain.getStateByStateRoot(stateId, {allowRegen: true})
       : typeof stateId === "number"
-        ? stateId >= chain.forkChoice.getFinalizedBlock().slot
-          ? await chain.getStateBySlot(stateId, {allowRegen: true})
-          : await chain.getHistoricalStateBySlot(stateId)
+        ? stateId > chain.clock.currentSlot
+          ? null // Don't try to serve future slots
+          : stateId >= chain.forkChoice.getFinalizedBlock().slot
+            ? await chain.getStateBySlot(stateId, {allowRegen: true})
+            : await chain.getHistoricalStateBySlot(stateId)
         : await chain.getStateOrBytesByCheckpoint(stateId);

   if (!res) {
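The nested ternary above packs the slot-based lookup order into one expression. A minimal sketch of the same decision ladder, unrolled for readability; `chain` and `stateId` follow the names in the diff, and the loose typing is an assumption for illustration only:

```ts
// Hedged sketch, not the actual Lodestar source.
async function getStateForSlotId(chain: any, stateId: number): Promise<unknown | null> {
  if (stateId > chain.clock.currentSlot) {
    return null; // don't try to serve future slots
  }
  if (stateId >= chain.forkChoice.getFinalizedBlock().slot) {
    // non-finalized slots: serve from the state cache, regenerating if needed
    return chain.getStateBySlot(stateId, {allowRegen: true});
  }
  // finalized/older slots: reconstruct from historical state storage
  return chain.getHistoricalStateBySlot(stateId);
}
```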
@@ -70,7 +70,8 @@ export async function importBlock(
   fullyVerifiedBlock: FullyVerifiedBlock,
   opts: ImportBlockOpts
 ): Promise<void> {
-  const {blockInput, postState, parentBlockSlot, executionStatus, dataAvailabilityStatus} = fullyVerifiedBlock;
+  const {blockInput, postState, parentBlockSlot, executionStatus, dataAvailabilityStatus, indexedAttestations} =
+    fullyVerifiedBlock;
   const block = blockInput.getBlock();
   const source = blockInput.getBlockSource();
   const {slot: blockSlot} = block.message;
@@ -138,10 +139,10 @@

     const addAttestation = fork >= ForkSeq.electra ? addAttestationPostElectra : addAttestationPreElectra;

-    for (const attestation of attestations) {
+    for (let i = 0; i < attestations.length; i++) {
+      const attestation = attestations[i];
       try {
-        // TODO Electra: figure out how to reuse the attesting indices computed from state transition
-        const indexedAttestation = postState.epochCtx.getIndexedAttestation(fork, attestation);
+        const indexedAttestation = indexedAttestations[i];
         const {target, beaconBlockRoot} = attestation.data;

         const attDataRoot = toRootHex(ssz.phase0.AttestationData.hashTreeRoot(indexedAttestation.data));
@@ -72,7 +72,7 @@ export async function processBlocks(

   // Fully verify a block to be imported immediately after. Does not produce any side-effects besides adding intermediate
   // states in the state cache through regen.
-  const {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus} =
+  const {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus, indexedAttestationsByBlock} =
     await verifyBlocksInEpoch.call(this, parentBlock, relevantBlocks, opts);

   // If segmentExecStatus has lvhForkchoice then, the entire segment should be invalid
@@ -94,6 +94,7 @@
           // start supporting optimistic syncing/processing
           dataAvailabilityStatus: dataAvailabilityStatuses[i],
           proposerBalanceDelta: proposerBalanceDeltas[i],
+          indexedAttestations: indexedAttestationsByBlock[i],
           // TODO: Make this param mandatory and capture in gossip
           seenTimestampSec: opts.seenTimestampSec ?? Math.floor(Date.now() / 1000),
         })
@@ -2,7 +2,7 @@ import type {ChainForkConfig} from "@lodestar/config";
 import {MaybeValidExecutionStatus} from "@lodestar/fork-choice";
 import {ForkSeq} from "@lodestar/params";
 import {CachedBeaconStateAllForks, DataAvailabilityStatus, computeEpochAtSlot} from "@lodestar/state-transition";
-import type {Slot, fulu} from "@lodestar/types";
+import type {IndexedAttestation, Slot, fulu} from "@lodestar/types";
 import {IBlockInput} from "./blockInput/types.js";

 export enum GossipedInputType {
@@ -96,6 +96,10 @@
    */
   executionStatus: MaybeValidExecutionStatus;
   dataAvailabilityStatus: DataAvailabilityStatus;
+  /**
+   * Pre-computed indexed attestations from signature verification to avoid duplicate work
+   */
+  indexedAttestations: IndexedAttestation[];
   /** Seen timestamp seconds */
   seenTimestampSec: number;
 };
@@ -7,7 +7,7 @@ import {
   computeEpochAtSlot,
   isStateValidatorsNodesPopulated,
 } from "@lodestar/state-transition";
-import {bellatrix, deneb} from "@lodestar/types";
+import {IndexedAttestation, bellatrix, deneb} from "@lodestar/types";
 import {Logger, toRootHex} from "@lodestar/utils";
 import type {BeaconChain} from "../chain.js";
 import {BlockError, BlockErrorCode} from "../errors/index.js";
@@ -47,6 +47,7 @@ export async function verifyBlocksInEpoch(
   proposerBalanceDeltas: number[];
   segmentExecStatus: SegmentExecStatus;
   dataAvailabilityStatuses: DataAvailabilityStatus[];
+  indexedAttestationsByBlock: IndexedAttestation[][];
 }> {
   const blocks = blockInputs.map((blockInput) => blockInput.getBlock());
   const lastBlock = blocks.at(-1);
@@ -65,6 +66,9 @@
     }
   }

+  // All blocks are in the same epoch
+  const fork = this.config.getForkSeq(block0.message.slot);
+
   // TODO: Skip in process chain segment
   // Retrieve preState from cache (regen)
   const preState0 = await this.regen
@@ -92,6 +96,24 @@
   const abortController = new AbortController();

   try {
+    // Start execution payload verification first (async request to execution client)
+    const verifyExecutionPayloadsPromise =
+      opts.skipVerifyExecutionPayload !== true
+        ? verifyBlocksExecutionPayload(this, parentBlock, blockInputs, preState0, abortController.signal, opts)
+        : Promise.resolve({
+            execAborted: null,
+            executionStatuses: blocks.map((_blk) => ExecutionStatus.Syncing),
+            mergeBlockFound: null,
+          } as SegmentExecStatus);
+
+    // Store indexed attestations for each block to avoid recomputing them during import
+    const indexedAttestationsByBlock: IndexedAttestation[][] = [];
+    for (const [i, block] of blocks.entries()) {
+      indexedAttestationsByBlock[i] = block.message.body.attestations.map((attestation) =>
+        preState0.epochCtx.getIndexedAttestation(fork, attestation)
+      );
+    }
+
     // batch all I/O operations to reduce overhead
     const [
       segmentExecStatus,
@@ -99,14 +121,7 @@
       {postStates, proposerBalanceDeltas, verifyStateTime},
       {verifySignaturesTime},
     ] = await Promise.all([
-      // Execution payloads
-      opts.skipVerifyExecutionPayload !== true
-        ? verifyBlocksExecutionPayload(this, parentBlock, blockInputs, preState0, abortController.signal, opts)
-        : Promise.resolve({
-            execAborted: null,
-            executionStatuses: blocks.map((_blk) => ExecutionStatus.Syncing),
-            mergeBlockFound: null,
-          } as SegmentExecStatus),
+      verifyExecutionPayloadsPromise,

       // data availability for the blobs
       verifyBlocksDataAvailability(blockInputs, abortController.signal),
@@ -127,7 +142,15 @@

       // All signatures at once
       opts.skipVerifyBlockSignatures !== true
-        ? verifyBlocksSignatures(this.bls, this.logger, this.metrics, preState0, blocks, opts)
+        ? verifyBlocksSignatures(
+            this.bls,
+            this.logger,
+            this.metrics,
+            preState0,
+            blocks,
+            indexedAttestationsByBlock,
+            opts
+          )
         : Promise.resolve({verifySignaturesTime: Date.now()}),

       // ideally we want to only persist blocks after verifying them however the reality is there are
@@ -222,7 +245,7 @@
       );
     }

-    return {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus};
+    return {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus, indexedAttestationsByBlock};
   } finally {
     abortController.abort();
   }
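The core of the change above is that indexed attestations are computed once from the pre-state and then reused for both batch signature verification and block import, instead of being recomputed per consumer. A minimal sketch of that precompute step under simplified types; `epochCtx.getIndexedAttestation` and the loop shape mirror the diff, everything else is illustrative:

```ts
// Hedged sketch, assuming simplified block/state shapes rather than the real Lodestar types.
type IndexedAttestation = unknown;

function precomputeIndexedAttestations(preState0: any, blocks: any[], fork: number): IndexedAttestation[][] {
  const indexedAttestationsByBlock: IndexedAttestation[][] = [];
  for (const [i, block] of blocks.entries()) {
    // one conversion per attestation per block; both signature verification and import read this array
    indexedAttestationsByBlock[i] = block.message.body.attestations.map((attestation: unknown) =>
      preState0.epochCtx.getIndexedAttestation(fork, attestation)
    );
  }
  return indexedAttestationsByBlock;
}
```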
@@ -1,5 +1,5 @@
 import {CachedBeaconStateAllForks, getBlockSignatureSets} from "@lodestar/state-transition";
-import {SignedBeaconBlock} from "@lodestar/types";
+import {IndexedAttestation, SignedBeaconBlock} from "@lodestar/types";
 import {Logger} from "@lodestar/utils";
 import {Metrics} from "../../metrics/metrics.js";
 import {nextEventLoop} from "../../util/eventLoop.js";
@@ -20,6 +20,7 @@ export async function verifyBlocksSignatures(
   metrics: Metrics | null,
   preState0: CachedBeaconStateAllForks,
   blocks: SignedBeaconBlock[],
+  indexedAttestationsByBlock: IndexedAttestation[][],
   opts: ImportBlockOpts
 ): Promise<{verifySignaturesTime: number}> {
   const isValidPromises: Promise<boolean>[] = [];
@@ -37,7 +38,7 @@
         : //
           // Verify signatures per block to track which block is invalid
           bls.verifySignatureSets(
-            getBlockSignatureSets(preState0, block, {
+            getBlockSignatureSets(preState0, block, indexedAttestationsByBlock[i], {
               skipProposerSignature: opts.validProposerSignature,
             })
           );
@@ -22,6 +22,7 @@ import {
 import {
   CachedBeaconStateAllForks,
   CachedBeaconStateAltair,
+  CachedBeaconStateGloas,
   CachedBeaconStatePhase0,
   EffectiveBalanceIncrements,
   RootCache,
@@ -486,7 +487,10 @@ export class AggregatedAttestationPool {
         consolidation.attData,
         inclusionDistance,
         stateEpoch,
-        rootCache
+        rootCache,
+        ForkSeq[fork] >= ForkSeq.gloas
+          ? (state as CachedBeaconStateGloas).executionPayloadAvailability.toBoolArray()
+          : null
       );

       const weight =
@@ -1,13 +1,14 @@
 import {Message} from "@libp2p/interface";
-// snappyjs is better for compression for smaller payloads
-import {compress, uncompress} from "snappyjs";
 import xxhashFactory from "xxhash-wasm";
 import {digest} from "@chainsafe/as-sha256";
 import {RPC} from "@chainsafe/libp2p-gossipsub/message";
 import {DataTransform} from "@chainsafe/libp2p-gossipsub/types";
+import snappyWasm from "@chainsafe/snappy-wasm";
 import {ForkName} from "@lodestar/params";
 import {intToBytes} from "@lodestar/utils";
 import {MESSAGE_DOMAIN_VALID_SNAPPY} from "./constants.js";
+import {Eth2GossipsubMetrics} from "./metrics.js";
 import {GossipTopicCache, getGossipSSZType} from "./topic.js";

 // Load WASM
@@ -16,6 +17,10 @@ const xxhash = await xxhashFactory();
 // Use salt to prevent msgId from being mined for collisions
 const h64Seed = BigInt(Math.floor(Math.random() * 1e9));

+// create singleton snappy encoder + decoder
+const encoder = new snappyWasm.Encoder();
+const decoder = new snappyWasm.Decoder();
+
 // Shared buffer to convert msgId to string
 const sharedMsgIdBuf = Buffer.alloc(20);

@@ -70,7 +75,8 @@ export function msgIdFn(gossipTopicCache: GossipTopicCache, msg: Message): Uint8
 export class DataTransformSnappy implements DataTransform {
   constructor(
     private readonly gossipTopicCache: GossipTopicCache,
-    private readonly maxSizePerMessage: number
+    private readonly maxSizePerMessage: number,
+    private readonly metrics: Eth2GossipsubMetrics | null
   ) {}

   /**
@@ -80,13 +86,15 @@ export class DataTransformSnappy implements DataTransform {
    * - `outboundTransform()`: compress snappy payload
    */
   inboundTransform(topicStr: string, data: Uint8Array): Uint8Array {
-    const uncompressedData = uncompress(data, this.maxSizePerMessage);
+    // check uncompressed data length before we actually decompress
+    const uncompressedDataLength = snappyWasm.decompress_len(data);
+    if (uncompressedDataLength > this.maxSizePerMessage) {
+      throw Error(`ssz_snappy decoded data length ${uncompressedDataLength} > ${this.maxSizePerMessage}`);
+    }

-    // check uncompressed data length before we extract beacon block root, slot or
-    // attestation data at later steps
-    const uncompressedDataLength = uncompressedData.length;
     const topic = this.gossipTopicCache.getTopic(topicStr);
     const sszType = getGossipSSZType(topic);
+    this.metrics?.dataTransform.inbound.inc({type: topic.type});

     if (uncompressedDataLength < sszType.minSize) {
       throw Error(`ssz_snappy decoded data length ${uncompressedDataLength} < ${sszType.minSize}`);
@@ -95,6 +103,10 @@
       throw Error(`ssz_snappy decoded data length ${uncompressedDataLength} > ${sszType.maxSize}`);
     }

+    // Only after sanity length checks, we can decompress the data
+    // Using Buffer.alloc() instead of Buffer.allocUnsafe() to mitigate high GC pressure observed in some environments
+    const uncompressedData = Buffer.alloc(uncompressedDataLength);
+    decoder.decompress_into(data, uncompressedData);
     return uncompressedData;
   }

@@ -102,11 +114,16 @@
    * Takes the data to be published (a topic and associated data) transforms the data. The
    * transformed data will then be used to create a `RawGossipsubMessage` to be sent to peers.
    */
-  outboundTransform(_topicStr: string, data: Uint8Array): Uint8Array {
+  outboundTransform(topicStr: string, data: Uint8Array): Uint8Array {
+    const topic = this.gossipTopicCache.getTopic(topicStr);
+    this.metrics?.dataTransform.outbound.inc({type: topic.type});
     if (data.length > this.maxSizePerMessage) {
       throw Error(`ssz_snappy encoded data length ${data.length} > ${this.maxSizePerMessage}`);
     }
-    // No need to parse topic, everything is snappy compressed
-    return compress(data);
+
+    // Using Buffer.alloc() instead of Buffer.allocUnsafe() to mitigate high GC pressure observed in some environments
+    const compressedData = Buffer.alloc(snappyWasm.max_compress_len(data.length));
+    const compressedLen = encoder.compress_into(data, compressedData);
+    return compressedData.subarray(0, compressedLen);
   }
 }
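The inbound change above validates the advertised decompressed length against the per-message cap before allocating anything or decompressing. A minimal standalone sketch of that pattern, assuming only the @chainsafe/snappy-wasm calls already shown in the diff (Decoder, decompress_len, decompress_into); it is not a drop-in replacement for the class method:

```ts
import snappyWasm from "@chainsafe/snappy-wasm";

const decoder = new snappyWasm.Decoder();

// Hedged sketch: reject oversized payloads before paying the decompression cost.
function safeUncompress(data: Uint8Array, maxSizePerMessage: number): Uint8Array {
  const uncompressedDataLength = snappyWasm.decompress_len(data);
  if (uncompressedDataLength > maxSizePerMessage) {
    throw Error(`ssz_snappy decoded data length ${uncompressedDataLength} > ${maxSizePerMessage}`);
  }
  // Buffer.alloc (zero-filled) rather than allocUnsafe, matching the GC-pressure note in the diff
  const out = Buffer.alloc(uncompressedDataLength);
  decoder.decompress_into(data, out);
  return out;
}
```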
@@ -89,6 +89,13 @@ export class Eth2Gossipsub extends GossipSub {
     const gossipTopicCache = new GossipTopicCache(config);

     const scoreParams = computeGossipPeerScoreParams({config, eth2Context: modules.eth2Context});
+    let metrics: Eth2GossipsubMetrics | null = null;
+    if (metricsRegister) {
+      metrics = createEth2GossipsubMetrics(metricsRegister);
+      metrics.gossipMesh.peersByType.addCollect(() =>
+        this.onScrapeLodestarMetrics(metrics as Eth2GossipsubMetrics, networkConfig)
+      );
+    }

     // Gossipsub parameters defined here:
     // https://github.com/ethereum/consensus-specs/blob/v1.1.10/specs/phase0/p2p-interface.md#the-gossip-domain-gossipsub
@@ -116,7 +123,7 @@
       fastMsgIdFn: fastMsgIdFn,
       msgIdFn: msgIdFn.bind(msgIdFn, gossipTopicCache),
       msgIdToStrFn: msgIdToStrFn,
-      dataTransform: new DataTransformSnappy(gossipTopicCache, config.MAX_PAYLOAD_SIZE),
+      dataTransform: new DataTransformSnappy(gossipTopicCache, config.MAX_PAYLOAD_SIZE, metrics),
       metricsRegister: metricsRegister as MetricsRegister | null,
       metricsTopicStrToLabel: metricsRegister
         ? getMetricsTopicStrToLabel(networkConfig, {disableLightClientServer: opts.disableLightClientServer ?? false})
@@ -141,11 +148,6 @@
     this.events = events;
     this.gossipTopicCache = gossipTopicCache;

-    if (metricsRegister) {
-      const metrics = createEth2GossipsubMetrics(metricsRegister);
-      metrics.gossipMesh.peersByType.addCollect(() => this.onScrapeLodestarMetrics(metrics, networkConfig));
-    }
-
     this.addEventListener("gossipsub:message", this.onGossipsubMessage.bind(this));
     this.events.on(NetworkEvent.gossipMessageValidationResult, this.onValidationResult.bind(this));
@@ -67,5 +67,17 @@ export function createEth2GossipsubMetrics(register: RegistryMetricCreator) {
         labelNames: ["subnet", "boundary"],
       }),
     },
+    dataTransform: {
+      inbound: register.counter<{type: GossipType}>({
+        name: "lodestar_gossip_data_transform_inbound_total",
+        help: "Total number of inbound data transforms by gossip type",
+        labelNames: ["type"],
+      }),
+      outbound: register.counter<{type: GossipType}>({
+        name: "lodestar_gossip_data_transform_outbound_total",
+        help: "Total number of outbound data transforms by gossip type",
+        labelNames: ["type"],
+      }),
+    },
   };
 }
@@ -1,5 +1,5 @@
 import assert from "node:assert";
-import {bench, describe, setBenchOpts} from "@chainsafe/benchmark";
+import {bench, describe} from "@chainsafe/benchmark";
 import {ssz} from "@lodestar/types";
 import {generateTestCachedBeaconStateOnlyValidators} from "../../../../../state-transition/test/perf/util.js";
 import {validateGossipAttestationsSameAttData} from "../../../../src/chain/validation/index.js";
@@ -7,10 +7,6 @@ import {getAttDataFromAttestationSerialized} from "../../../../src/util/sszBytes
 import {getAttestationValidData} from "../../../utils/validationData/attestation.js";

 describe("validate gossip attestation", () => {
-  setBenchOpts({
-    minMs: 30_000,
-  });
-
   const vc = 640_000;
   const stateSlot = 100;
   const state = generateTestCachedBeaconStateOnlyValidators({vc, slot: stateSlot});
@@ -2,6 +2,7 @@ import {randomBytes} from "node:crypto";
 import * as snappyRs from "snappy";
 import * as snappyJs from "snappyjs";
 import {bench, describe} from "@chainsafe/benchmark";
+import snappyWasm from "@chainsafe/snappy-wasm";

 describe("network / gossip / snappy", () => {
   const msgLens = [
@@ -15,6 +16,8 @@ describe("network / gossip / snappy", () => {
     10000, // 100000,
   ];
   describe("compress", () => {
+    const encoder = new snappyWasm.Encoder();
+
     for (const msgLen of msgLens) {
       const uncompressed = randomBytes(msgLen);
       const RUNS_FACTOR = 1000;
@@ -38,9 +41,33 @@
           }
         },
       });

+      bench({
+        id: `${msgLen} bytes - compress - snappy-wasm`,
+        runsFactor: RUNS_FACTOR,
+        fn: () => {
+          for (let i = 0; i < RUNS_FACTOR; i++) {
+            encoder.compress(uncompressed);
+          }
+        },
+      });
+
+      bench({
+        id: `${msgLen} bytes - compress - snappy-wasm - prealloc`,
+        runsFactor: RUNS_FACTOR,
+        fn: () => {
+          for (let i = 0; i < RUNS_FACTOR; i++) {
+            let out = Buffer.alloc(snappyWasm.max_compress_len(uncompressed.length));
+            const len = encoder.compress_into(uncompressed, out);
+            out = out.subarray(0, len);
+          }
+        },
+      });
     }
   });
   describe("uncompress", () => {
+    const decoder = new snappyWasm.Decoder();
+
     for (const msgLen of msgLens) {
       const uncompressed = randomBytes(msgLen);
       const compressed = snappyJs.compress(uncompressed);
@@ -65,6 +92,26 @@
           }
         },
       });

+      bench({
+        id: `${msgLen} bytes - uncompress - snappy-wasm`,
+        runsFactor: RUNS_FACTOR,
+        fn: () => {
+          for (let i = 0; i < RUNS_FACTOR; i++) {
+            decoder.decompress(compressed);
+          }
+        },
+      });
+
+      bench({
+        id: `${msgLen} bytes - uncompress - snappy-wasm - prealloc`,
+        runsFactor: RUNS_FACTOR,
+        fn: () => {
+          for (let i = 0; i < RUNS_FACTOR; i++) {
+            decoder.decompress_into(compressed, Buffer.alloc(snappyWasm.decompress_len(compressed)));
+          }
+        },
+      });
     }
   });
 });
@@ -1,6 +1,9 @@
 import {beforeAll, bench, describe} from "@chainsafe/benchmark";

-describe("bytes utils", () => {
+/**
+ * Enable this if you want to compare performance of Buffer vs Uint8Array operations. Not lodestar code so skipped by default.
+ */
+describe.skip("bytes utils", () => {
   const roots: Uint8Array[] = [];
   let buffers: Buffer[] = [];
   const count = 32;
@@ -1,6 +1,10 @@
 import {bench, describe} from "@chainsafe/benchmark";

-describe("dataview", () => {
+/**
+ * Benchmark to compare DataView.getUint32 vs manual uint32 creation from Uint8Array.
+ * Not lodestar code so skipped by default.
+ */
+describe.skip("dataview", () => {
   const data = Uint8Array.from([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]);

   bench({
@@ -1,6 +1,29 @@
import {bench, describe, setBenchOpts} from "@chainsafe/benchmark";

describe("transfer bytes", () => {
/**
* This shows how fast the transfer of bytes between workers is compared to a simple copy.
* Disable by default because it's not lodestar code.
* transfer bytes
✔ transfer serialized Status (84 B) 232504.1 ops/s 4.301000 us/op x1.968 39313 runs 0.320 s
✔ copy serialized Status (84 B) 413736.0 ops/s 2.417000 us/op x2.029 79160 runs 0.344 s
✔ transfer serialized SignedVoluntaryExit (112 B) 233644.9 ops/s 4.280000 us/op x1.912 65063 runs 0.535 s
✔ copy serialized SignedVoluntaryExit (112 B) 434593.7 ops/s 2.301000 us/op x1.895 105903 runs 0.453 s
✔ transfer serialized ProposerSlashing (416 B) 243013.4 ops/s 4.115000 us/op x1.800 38143 runs 0.321 s
✔ copy serialized ProposerSlashing (416 B) 360360.4 ops/s 2.775000 us/op x2.202 85781 runs 0.444 s
✔ transfer serialized Attestation (485 B) 238948.6 ops/s 4.185000 us/op x1.809 38342 runs 0.320 s
✔ copy serialized Attestation (485 B) 438020.1 ops/s 2.283000 us/op x1.777 97506 runs 0.459 s
✔ transfer serialized AttesterSlashing (33232 B) 228937.7 ops/s 4.368000 us/op x1.734 28449 runs 0.419 s
✔ copy serialized AttesterSlashing (33232 B) 129148.9 ops/s 7.743000 us/op x1.797 21674 runs 0.310 s
✔ transfer serialized Small SignedBeaconBlock (128000 B) 183553.6 ops/s 5.448000 us/op x1.328 10288 runs 0.408 s
✔ copy serialized Small SignedBeaconBlock (128000 B) 11670.25 ops/s 85.68800 us/op x6.069 2868 runs 0.405 s
✔ transfer serialized Avg SignedBeaconBlock (200000 B) 199561.0 ops/s 5.011000 us/op x1.172 12879 runs 0.727 s
✔ copy serialized Avg SignedBeaconBlock (200000 B) 12585.90 ops/s 79.45400 us/op x4.288 2916 runs 0.408 s
✔ transfer serialized BlobsSidecar (524380 B) 189501.6 ops/s 5.277000 us/op x1.025 1896 runs 0.474 s
✔ copy serialized BlobsSidecar (524380 B) 5294.703 ops/s 188.8680 us/op x1.702 1268 runs 0.546 s
✔ transfer serialized Big SignedBeaconBlock (1000000 B) 167084.4 ops/s 5.985000 us/op x1.134 1443 runs 0.514 s
✔ copy serialized Big SignedBeaconBlock (1000000 B) 6337.457 ops/s 157.7920 us/op x1.246 1200 runs 0.521 s
*/
describe.skip("transfer bytes", () => {
const sizes = [
{size: 84, name: "Status"},
{size: 112, name: "SignedVoluntaryExit"},

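For reference, a small sketch of what transfer versus copy means for the benchmark above, using MessageChannel from node:worker_threads (the same postMessage transfer-list mechanism applies to Worker). The 200 kB size is just an illustrative stand-in for an average serialized block.

```ts
// Transfer vs copy of serialized bytes between ports, as measured by the benchmark above.
import {MessageChannel} from "node:worker_threads";

const {port1, port2} = new MessageChannel();
port2.on("message", (bytes: Uint8Array) => console.log("received", bytes.byteLength, "bytes"));

const serialized = new Uint8Array(200_000); // stand-in for an average serialized SignedBeaconBlock

// Copy: the structured clone duplicates the bytes; the sender keeps a usable view.
port1.postMessage(serialized.slice());

// Transfer: the underlying ArrayBuffer moves to the receiver and the sender's
// view is detached, so no bytes are copied for large payloads.
port1.postMessage(serialized, [serialized.buffer]);
console.log(serialized.byteLength); // 0 after the transfer

// Allow queued messages to be delivered before closing the ports.
setImmediate(() => {
  port1.close();
  port2.close();
});
```

Consistent with the numbers above, copying tends to win for small messages, while transferring the buffer avoids the copy and pulls ahead once payloads reach roughly 100 kB and beyond.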
@@ -51,6 +51,7 @@ const epochTransitionFns: Record<string, EpochTransitionFn> = {
const fork = state.config.getForkSeq(state.slot);
epochFns.processProposerLookahead(fork, state as CachedBeaconStateFulu, epochTransitionCache);
},
builder_pending_payments: epochFns.processBuilderPendingPayments as EpochTransitionFn,
};

/**

@@ -1,5 +1,5 @@
|
||||
import path from "node:path";
|
||||
import {ACTIVE_PRESET, ForkName} from "@lodestar/params";
|
||||
import {ACTIVE_PRESET, ForkName, ForkSeq} from "@lodestar/params";
|
||||
import {InputType} from "@lodestar/spec-test-util";
|
||||
import {
|
||||
BeaconStateAllForks,
|
||||
@@ -7,11 +7,12 @@ import {
|
||||
CachedBeaconStateBellatrix,
|
||||
CachedBeaconStateCapella,
|
||||
CachedBeaconStateElectra,
|
||||
CachedBeaconStateGloas,
|
||||
ExecutionPayloadStatus,
|
||||
getBlockRootAtSlot,
|
||||
} from "@lodestar/state-transition";
|
||||
import * as blockFns from "@lodestar/state-transition/block";
|
||||
import {AttesterSlashing, altair, bellatrix, capella, electra, phase0, ssz, sszTypesFor} from "@lodestar/types";
|
||||
import {AttesterSlashing, altair, bellatrix, capella, electra, gloas, phase0, ssz, sszTypesFor} from "@lodestar/types";
|
||||
import {createCachedBeaconStateTest} from "../../utils/cachedBeaconState.js";
|
||||
import {getConfig} from "../../utils/config.js";
|
||||
import {ethereumConsensusSpecsTests} from "../specTestVersioning.js";
|
||||
@@ -67,13 +68,24 @@ const operationFns: Record<string, BlockProcessFn<CachedBeaconStateAllForks>> =
|
||||
blockFns.processVoluntaryExit(fork, state, testCase.voluntary_exit);
|
||||
},
|
||||
|
||||
execution_payload: (state, testCase: {body: bellatrix.BeaconBlockBody; execution: {execution_valid: boolean}}) => {
|
||||
execution_payload: (
|
||||
state,
|
||||
testCase: {
|
||||
body: bellatrix.BeaconBlockBody | gloas.BeaconBlockBody;
|
||||
signed_envelope: gloas.SignedExecutionPayloadEnvelope;
|
||||
execution: {execution_valid: boolean};
|
||||
}
|
||||
) => {
|
||||
const fork = state.config.getForkSeq(state.slot);
|
||||
blockFns.processExecutionPayload(fork, state as CachedBeaconStateBellatrix, testCase.body, {
|
||||
executionPayloadStatus: testCase.execution.execution_valid
|
||||
? ExecutionPayloadStatus.valid
|
||||
: ExecutionPayloadStatus.invalid,
|
||||
});
|
||||
if (fork >= ForkSeq.gloas) {
|
||||
blockFns.processExecutionPayloadEnvelope(state as CachedBeaconStateGloas, testCase.signed_envelope, true);
|
||||
} else {
|
||||
blockFns.processExecutionPayload(fork, state as CachedBeaconStateBellatrix, testCase.body, {
|
||||
executionPayloadStatus: testCase.execution.execution_valid
|
||||
? ExecutionPayloadStatus.valid
|
||||
: ExecutionPayloadStatus.invalid,
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
bls_to_execution_change: (state, testCase: {address_change: capella.SignedBLSToExecutionChange}) => {
|
||||
@@ -95,7 +107,16 @@ const operationFns: Record<string, BlockProcessFn<CachedBeaconStateAllForks>> =
|
||||
},
|
||||
|
||||
consolidation_request: (state, testCase: {consolidation_request: electra.ConsolidationRequest}) => {
|
||||
blockFns.processConsolidationRequest(state as CachedBeaconStateElectra, testCase.consolidation_request);
|
||||
const fork = state.config.getForkSeq(state.slot);
|
||||
blockFns.processConsolidationRequest(fork, state as CachedBeaconStateElectra, testCase.consolidation_request);
|
||||
},
|
||||
|
||||
execution_payload_bid: (state, testCase: {block: gloas.BeaconBlock}) => {
|
||||
blockFns.processExecutionPayloadBid(state as CachedBeaconStateGloas, testCase.block);
|
||||
},
|
||||
|
||||
payload_attestation: (state, testCase: {payload_attestation: gloas.PayloadAttestation}) => {
|
||||
blockFns.processPayloadAttestation(state as CachedBeaconStateGloas, testCase.payload_attestation);
|
||||
},
|
||||
};
|
||||
|
||||
@@ -149,6 +170,8 @@ const operations: TestRunnerFn<OperationsTestCase, BeaconStateAllForks> = (fork,
|
||||
withdrawal_request: ssz.electra.WithdrawalRequest,
|
||||
deposit_request: ssz.electra.DepositRequest,
|
||||
consolidation_request: ssz.electra.ConsolidationRequest,
|
||||
payload_attestation: ssz.gloas.PayloadAttestation,
|
||||
signed_envelope: ssz.gloas.SignedExecutionPayloadEnvelope,
|
||||
},
|
||||
shouldError: (testCase) => testCase.post === undefined,
|
||||
getExpected: (testCase) => testCase.post,
|
||||
|
||||
@@ -14,7 +14,7 @@ import {DownloadTestsOptions} from "@lodestar/spec-test-util/downloadTests";
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
export const ethereumConsensusSpecsTests: DownloadTestsOptions = {
|
||||
specVersion: "v1.6.0-beta.2",
|
||||
specVersion: "v1.6.1",
|
||||
// Target directory is the host package root: 'packages/*/spec-tests'
|
||||
outputDir: path.join(__dirname, "../../spec-tests"),
|
||||
specTestsRepoUrl: "https://github.com/ethereum/consensus-specs",
|
||||
|
||||
@@ -69,7 +69,7 @@ export const defaultSkipOpts: SkipOpts = {
|
||||
/^electra\/light_client\/single_merkle_proof\/BeaconBlockBody.*/,
|
||||
/^fulu\/light_client\/single_merkle_proof\/BeaconBlockBody.*/,
|
||||
/^.+\/light_client\/data_collection\/.*/,
|
||||
/^gloas\/(?!.*ssz_static).*$/,
|
||||
/^gloas\/(finality|fork_choice|networking|sanity|transition)\/.*$/,
|
||||
/^gloas\/ssz_static\/ForkChoiceNode.*$/,
|
||||
],
|
||||
skippedTests: [],
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@chainsafe/lodestar",
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"description": "Command line interface for lodestar",
|
||||
"author": "ChainSafe Systems",
|
||||
"license": "Apache-2.0",
|
||||
@@ -69,17 +69,17 @@
|
||||
"@libp2p/crypto": "^5.0.15",
|
||||
"@libp2p/interface": "^2.7.0",
|
||||
"@libp2p/peer-id": "^5.1.0",
|
||||
"@lodestar/api": "^1.37.0",
|
||||
"@lodestar/beacon-node": "^1.37.0",
|
||||
"@lodestar/config": "^1.37.0",
|
||||
"@lodestar/db": "^1.37.0",
|
||||
"@lodestar/light-client": "^1.37.0",
|
||||
"@lodestar/logger": "^1.37.0",
|
||||
"@lodestar/params": "^1.37.0",
|
||||
"@lodestar/state-transition": "^1.37.0",
|
||||
"@lodestar/types": "^1.37.0",
|
||||
"@lodestar/utils": "^1.37.0",
|
||||
"@lodestar/validator": "^1.37.0",
|
||||
"@lodestar/api": "^1.38.0",
|
||||
"@lodestar/beacon-node": "^1.38.0",
|
||||
"@lodestar/config": "^1.38.0",
|
||||
"@lodestar/db": "^1.38.0",
|
||||
"@lodestar/light-client": "^1.38.0",
|
||||
"@lodestar/logger": "^1.38.0",
|
||||
"@lodestar/params": "^1.38.0",
|
||||
"@lodestar/state-transition": "^1.38.0",
|
||||
"@lodestar/types": "^1.38.0",
|
||||
"@lodestar/utils": "^1.38.0",
|
||||
"@lodestar/validator": "^1.38.0",
|
||||
"@multiformats/multiaddr": "^12.1.3",
|
||||
"deepmerge": "^4.3.1",
|
||||
"ethers": "^6.7.0",
|
||||
@@ -94,7 +94,7 @@
|
||||
"yargs": "^17.7.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@lodestar/test-utils": "^1.37.0",
|
||||
"@lodestar/test-utils": "^1.38.0",
|
||||
"@types/debug": "^4.1.7",
|
||||
"@types/inquirer": "^9.0.3",
|
||||
"@types/proper-lockfile": "^4.1.4",
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
export {holeskyChainConfig as chainConfig} from "@lodestar/config/networks";
|
||||
|
||||
export const depositContractDeployBlock = 0;
|
||||
export const genesisFileUrl = "https://media.githubusercontent.com/media/eth-clients/holesky/main/metadata/genesis.ssz";
|
||||
export const genesisStateRoot = "0x0ea3f6f9515823b59c863454675fefcd1d8b4f2dbe454db166206a41fda060a0";
|
||||
export const bootnodesFileUrl =
|
||||
"https://raw.githubusercontent.com/eth-clients/holesky/main/metadata/bootstrap_nodes.yaml";
|
||||
|
||||
export const bootEnrs = [
|
||||
"enr:-Ku4QFo-9q73SspYI8cac_4kTX7yF800VXqJW4Lj3HkIkb5CMqFLxciNHePmMt4XdJzHvhrCC5ADI4D_GkAsxGJRLnQBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpAhnTT-AQFwAP__________gmlkgnY0gmlwhLKAiOmJc2VjcDI1NmsxoQORcM6e19T1T9gi7jxEZjk_sjVLGFscUNqAY9obgZaxbIN1ZHCCIyk",
|
||||
"enr:-Ku4QPG7F72mbKx3gEQEx07wpYYusGDh-ni6SNkLvOS-hhN-BxIggN7tKlmalb0L5JPoAfqD-akTZ-gX06hFeBEz4WoBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpAhnTT-AQFwAP__________gmlkgnY0gmlwhJK-DYCJc2VjcDI1NmsxoQKLVXFOhp2uX6jeT0DvvDpPcU8FWMjQdR4wMuORMhpX24N1ZHCCIyk",
|
||||
"enr:-LK4QPxe-mDiSOtEB_Y82ozvxn9aQM07Ui8A-vQHNgYGMMthfsfOabaaTHhhJHFCBQQVRjBww_A5bM1rf8MlkJU_l68Eh2F0dG5ldHOIAADAAAAAAACEZXRoMpBpt9l0BAFwAAABAAAAAAAAgmlkgnY0gmlwhLKAiOmJc2VjcDI1NmsxoQJu6T9pclPObAzEVQ53DpVQqjadmVxdTLL-J3h9NFoCeIN0Y3CCIyiDdWRwgiMo",
|
||||
"enr:-Ly4QGbOw4xNel5EhmDsJJ-QhC9XycWtsetnWoZ0uRy381GHdHsNHJiCwDTOkb3S1Ade0SFQkWJX_pgb3g8Jfh93rvMBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpBpt9l0BAFwAAABAAAAAAAAgmlkgnY0gmlwhJK-DYCJc2VjcDI1NmsxoQOxKv9sv3zKF8GDewgFGGHKP5HCZZpPpTrwl9eXKAWGxIhzeW5jbmV0cwCDdGNwgiMog3VkcIIjKA",
|
||||
"enr:-KO4QCi3ZY4TM5KL7bAG6laSYiYelDWu0crvUjCXlyc_cwEfUpMIuARuMJYGxWe-UYYpHEw_aBbZ1u-4tHQ8imyI5uaCAsGEZXRoMpBprg6ZBQFwAP__________gmlkgnY0gmlwhKyuI_mJc2VjcDI1NmsxoQLoFG5-vuNX6N49vnkTBaA3ZsBDF8B30DGqWOGtRGz5w4N0Y3CCIyiDdWRwgiMo",
|
||||
"enr:-Le4QLoE1wFHSlGcm48a9ZESb_MRLqPPu6G0vHqu4MaUcQNDHS69tsy-zkN0K6pglyzX8m24mkb-LtBcbjAYdP1uxm4BhGV0aDKQabfZdAQBcAAAAQAAAAAAAIJpZIJ2NIJpcIQ5gR6Wg2lwNpAgAUHQBwEQAAAAAAAAADR-iXNlY3AyNTZrMaEDPMSNdcL92uNIyCsS177Z6KTXlbZakQqxv3aQcWawNXeDdWRwgiMohHVkcDaCI4I",
|
||||
"enr:-KG4QC9Wm32mtzB5Fbj2ri2TEKglHmIWgvwTQCvNHBopuwpNAi1X6qOsBg_Z1-Bee-kfSrhzUQZSgDUyfH5outUprtoBgmlkgnY0gmlwhHEel3eDaXA2kP6AAAAAAAAAAlBW__4Srr-Jc2VjcDI1NmsxoQO7KE63Z4eSI55S1Yn7q9_xFkJ1Wt-a3LgiXuKGs19s0YN1ZHCCIyiEdWRwNoIjKA",
|
||||
];
|
||||
@@ -18,17 +18,15 @@ import * as chiado from "./chiado.js";
|
||||
import * as dev from "./dev.js";
|
||||
import * as ephemery from "./ephemery.js";
|
||||
import * as gnosis from "./gnosis.js";
|
||||
import * as holesky from "./holesky.js";
|
||||
import * as hoodi from "./hoodi.js";
|
||||
import * as mainnet from "./mainnet.js";
|
||||
import * as sepolia from "./sepolia.js";
|
||||
|
||||
export type NetworkName = "mainnet" | "dev" | "gnosis" | "sepolia" | "holesky" | "hoodi" | "chiado" | "ephemery";
|
||||
export type NetworkName = "mainnet" | "dev" | "gnosis" | "sepolia" | "hoodi" | "chiado" | "ephemery";
|
||||
export const networkNames: NetworkName[] = [
|
||||
"mainnet",
|
||||
"gnosis",
|
||||
"sepolia",
|
||||
"holesky",
|
||||
"hoodi",
|
||||
"chiado",
|
||||
"ephemery",
|
||||
@@ -66,8 +64,6 @@ export function getNetworkData(network: NetworkName): {
|
||||
return gnosis;
|
||||
case "sepolia":
|
||||
return sepolia;
|
||||
case "holesky":
|
||||
return holesky;
|
||||
case "hoodi":
|
||||
return hoodi;
|
||||
case "chiado":
|
||||
|
||||
@@ -30,10 +30,8 @@ describe("voluntaryExit using remote signer", () => {
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (externalSigner) {
|
||||
await externalSigner.container.stop();
|
||||
}
|
||||
afterAll(() => {
|
||||
externalSigner.stop();
|
||||
});
|
||||
|
||||
it("Perform a voluntary exit", async () => {
|
||||
|
||||
@@ -7,10 +7,10 @@ import {getTestdirPath} from "../../utils.js";
|
||||
|
||||
describe("config / beaconParams", () => {
|
||||
const GENESIS_FORK_VERSION_MAINNET = "0x00000000";
|
||||
const GENESIS_FORK_VERSION_HOLESKY = "0x01017000";
|
||||
const GENESIS_FORK_VERSION_HOODI = "0x10000910";
|
||||
const GENESIS_FORK_VERSION_FILE = "0x00009902";
|
||||
const GENESIS_FORK_VERSION_CLI = "0x00009903";
|
||||
const networkName = "holesky";
|
||||
const networkName = "hoodi";
|
||||
const paramsFilepath = getTestdirPath("./test-config.yaml");
|
||||
|
||||
const testCases: {
|
||||
@@ -31,7 +31,7 @@ describe("config / beaconParams", () => {
|
||||
network: networkName,
|
||||
additionalParamsCli: {},
|
||||
},
|
||||
GENESIS_FORK_VERSION: GENESIS_FORK_VERSION_HOLESKY,
|
||||
GENESIS_FORK_VERSION: GENESIS_FORK_VERSION_HOODI,
|
||||
},
|
||||
{
|
||||
id: "Params from network & file > returns file",
|
||||
|
||||
@@ -20,9 +20,9 @@ describe("paths / global", () => {
|
||||
},
|
||||
{
|
||||
id: "Network paths",
|
||||
args: {network: "holesky"},
|
||||
args: {network: "hoodi"},
|
||||
globalPaths: {
|
||||
dataDir: "/my-root-dir/lodestar/holesky",
|
||||
dataDir: "/my-root-dir/lodestar/hoodi",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lodestar/config",
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"description": "Chain configuration required for lodestar",
|
||||
"author": "ChainSafe Systems",
|
||||
"license": "Apache-2.0",
|
||||
@@ -61,8 +61,8 @@
|
||||
],
|
||||
"dependencies": {
|
||||
"@chainsafe/ssz": "^1.2.2",
|
||||
"@lodestar/params": "^1.37.0",
|
||||
"@lodestar/types": "^1.37.0",
|
||||
"@lodestar/utils": "^1.37.0"
|
||||
"@lodestar/params": "^1.38.0",
|
||||
"@lodestar/types": "^1.38.0",
|
||||
"@lodestar/utils": "^1.38.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
import {fromHex as b} from "@lodestar/utils";
|
||||
import {chainConfig as mainnet} from "../configs/mainnet.js";
|
||||
import {ChainConfig} from "../types.js";
|
||||
|
||||
// Holesky beacon chain config:
|
||||
// https://github.com/eth-clients/holesky/blob/main/metadata/config.yaml
|
||||
|
||||
export const holeskyChainConfig: ChainConfig = {
|
||||
...mainnet,
|
||||
|
||||
CONFIG_NAME: "holesky",
|
||||
|
||||
// Genesis
|
||||
// ---------------------------------------------------------------
|
||||
MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 16384,
|
||||
// Sep-28-2023 11:55:00 +UTC
|
||||
MIN_GENESIS_TIME: 1695902100,
|
||||
GENESIS_DELAY: 300,
|
||||
GENESIS_FORK_VERSION: b("0x01017000"),
|
||||
|
||||
// Forking
|
||||
// ---------------------------------------------------------------
|
||||
// # Altair
|
||||
ALTAIR_FORK_VERSION: b("0x02017000"),
|
||||
ALTAIR_FORK_EPOCH: 0,
|
||||
// # Merge
|
||||
BELLATRIX_FORK_VERSION: b("0x03017000"),
|
||||
BELLATRIX_FORK_EPOCH: 0,
|
||||
TERMINAL_TOTAL_DIFFICULTY: BigInt("0"),
|
||||
// Capella
|
||||
CAPELLA_FORK_VERSION: b("0x04017000"),
|
||||
CAPELLA_FORK_EPOCH: 256,
|
||||
// Deneb
|
||||
DENEB_FORK_VERSION: b("0x05017000"),
|
||||
DENEB_FORK_EPOCH: 29696,
|
||||
// Electra
|
||||
ELECTRA_FORK_VERSION: b("0x06017000"),
|
||||
ELECTRA_FORK_EPOCH: 115968,
|
||||
// Fulu
|
||||
FULU_FORK_VERSION: b("0x07017000"),
|
||||
FULU_FORK_EPOCH: 165120,
|
||||
// Gloas
|
||||
GLOAS_FORK_VERSION: b("0x08017000"),
|
||||
GLOAS_FORK_EPOCH: Infinity,
|
||||
|
||||
// # 28,000,000,000 Gwei to ensure quicker ejection
|
||||
EJECTION_BALANCE: 28000000000,
|
||||
|
||||
// Deposit contract
|
||||
// ---------------------------------------------------------------
|
||||
DEPOSIT_CHAIN_ID: 17000,
|
||||
DEPOSIT_NETWORK_ID: 17000,
|
||||
DEPOSIT_CONTRACT_ADDRESS: b("0x4242424242424242424242424242424242424242"),
|
||||
|
||||
// Blob Scheduling
|
||||
// ---------------------------------------------------------------
|
||||
BLOB_SCHEDULE: [
|
||||
{
|
||||
EPOCH: 166400,
|
||||
MAX_BLOBS_PER_BLOCK: 15,
|
||||
},
|
||||
{
|
||||
EPOCH: 167936,
|
||||
MAX_BLOBS_PER_BLOCK: 21,
|
||||
},
|
||||
],
|
||||
};
|
||||
@@ -9,7 +9,7 @@ export type ChainConfig = {
|
||||
* Free-form short name of the network that this configuration applies to - known
|
||||
* canonical network names include:
|
||||
* * 'mainnet' - there can be only one
|
||||
* * 'holesky' - testnet
|
||||
* * 'hoodi' - testnet
|
||||
* Must match the regex: [a-z0-9\-]
|
||||
*/
|
||||
CONFIG_NAME: string;
|
||||
|
||||
@@ -2,7 +2,6 @@ import {ChainConfig} from "./chainConfig/index.js";
|
||||
import {chiadoChainConfig} from "./chainConfig/networks/chiado.js";
|
||||
import {ephemeryChainConfig} from "./chainConfig/networks/ephemery.js";
|
||||
import {gnosisChainConfig} from "./chainConfig/networks/gnosis.js";
|
||||
import {holeskyChainConfig} from "./chainConfig/networks/holesky.js";
|
||||
import {hoodiChainConfig} from "./chainConfig/networks/hoodi.js";
|
||||
import {mainnetChainConfig} from "./chainConfig/networks/mainnet.js";
|
||||
import {sepoliaChainConfig} from "./chainConfig/networks/sepolia.js";
|
||||
@@ -11,18 +10,16 @@ export {
|
||||
mainnetChainConfig,
|
||||
gnosisChainConfig,
|
||||
sepoliaChainConfig,
|
||||
holeskyChainConfig,
|
||||
hoodiChainConfig,
|
||||
chiadoChainConfig,
|
||||
ephemeryChainConfig,
|
||||
};
|
||||
|
||||
export type NetworkName = "mainnet" | "gnosis" | "sepolia" | "holesky" | "hoodi" | "chiado" | "ephemery";
|
||||
export type NetworkName = "mainnet" | "gnosis" | "sepolia" | "hoodi" | "chiado" | "ephemery";
|
||||
export const networksChainConfig: Record<NetworkName, ChainConfig> = {
|
||||
mainnet: mainnetChainConfig,
|
||||
gnosis: gnosisChainConfig,
|
||||
sepolia: sepoliaChainConfig,
|
||||
holesky: holeskyChainConfig,
|
||||
hoodi: hoodiChainConfig,
|
||||
chiado: chiadoChainConfig,
|
||||
ephemery: ephemeryChainConfig,
|
||||
@@ -46,10 +43,6 @@ export const genesisData: Record<NetworkName, GenesisData> = {
|
||||
genesisTime: 1655733600,
|
||||
genesisValidatorsRoot: "0xd8ea171f3c94aea21ebc42a1ed61052acf3f9209c00e4efbaaddac09ed9b8078",
|
||||
},
|
||||
holesky: {
|
||||
genesisTime: 1695902400,
|
||||
genesisValidatorsRoot: "0x9143aa7c615a7f7115e2b6aac319c03529df8242ae705fba9df39b79c59fa8b1",
|
||||
},
|
||||
hoodi: {
|
||||
genesisTime: 1742213400,
|
||||
genesisValidatorsRoot: "0x212f13fc4df078b6cb7db228f1c8307566dcecf900867401a92023d7ba99cb5f",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lodestar/db",
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"description": "DB modules of Lodestar",
|
||||
"author": "ChainSafe Systems",
|
||||
"homepage": "https://github.com/ChainSafe/lodestar#readme",
|
||||
@@ -44,12 +44,12 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@chainsafe/ssz": "^1.2.2",
|
||||
"@lodestar/config": "^1.37.0",
|
||||
"@lodestar/utils": "^1.37.0",
|
||||
"@lodestar/config": "^1.38.0",
|
||||
"@lodestar/utils": "^1.38.0",
|
||||
"classic-level": "^1.4.1",
|
||||
"it-all": "^3.0.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@lodestar/logger": "^1.37.0"
|
||||
"@lodestar/logger": "^1.38.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"bugs": {
|
||||
"url": "https://github.com/ChainSafe/lodestar/issues"
|
||||
},
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
@@ -40,10 +40,10 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@chainsafe/blst": "^2.2.0",
|
||||
"@lodestar/config": "^1.37.0",
|
||||
"@lodestar/params": "^1.37.0",
|
||||
"@lodestar/reqresp": "^1.37.0",
|
||||
"@lodestar/types": "^1.37.0",
|
||||
"@lodestar/config": "^1.38.0",
|
||||
"@lodestar/params": "^1.38.0",
|
||||
"@lodestar/reqresp": "^1.38.0",
|
||||
"@lodestar/types": "^1.38.0",
|
||||
"uint8arraylist": "^2.4.7"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ import {type FileHandle, open} from "node:fs/promises";
|
||||
import {basename} from "node:path";
|
||||
import {PublicKey, Signature, verify} from "@chainsafe/blst";
|
||||
import {ChainForkConfig, createCachedGenesis} from "@lodestar/config";
|
||||
import {DOMAIN_BEACON_PROPOSER, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params";
|
||||
import {DOMAIN_BEACON_PROPOSER, GENESIS_SLOT, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params";
|
||||
import {BeaconState, SignedBeaconBlock, Slot, ssz} from "@lodestar/types";
|
||||
import {E2STORE_HEADER_SIZE, EntryType, readEntry, readVersion} from "../e2s.ts";
|
||||
import {snappyUncompress} from "../util.ts";
|
||||
@@ -180,6 +180,10 @@ export class EraReader {
|
||||
if (Buffer.compare(blockRoot, state.blockRoots[slot % SLOTS_PER_HISTORICAL_ROOT]) !== 0) {
|
||||
throw new Error(`Block root mismatch at slot ${slot}`);
|
||||
}
|
||||
// genesis block doesn't have valid signature
|
||||
if (slot === GENESIS_SLOT) {
|
||||
continue;
|
||||
}
|
||||
const msg = ssz.phase0.SigningData.hashTreeRoot({
|
||||
objectRoot: blockRoot,
|
||||
domain: cachedGenesis.getDomain(slot, DOMAIN_BEACON_PROPOSER),
|
||||
|
||||
@@ -10,7 +10,7 @@ import {readUint48} from "../util.ts";
|
||||
* Format: <config-name>-<era-number>-<short-historical-root>.era
|
||||
*/
|
||||
export interface EraFileName {
|
||||
/** CONFIG_NAME field of runtime config (mainnet, sepolia, holesky, etc.) */
|
||||
/** CONFIG_NAME field of runtime config (mainnet, sepolia, hoodi, etc.) */
|
||||
configName: string;
|
||||
/** Number of the first era stored in file, 5-digit zero-padded (00000, 00001, etc.) */
|
||||
eraNumber: number;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lodestar/flare",
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"description": "Beacon chain debugging tool",
|
||||
"author": "ChainSafe Systems",
|
||||
"license": "Apache-2.0",
|
||||
@@ -50,12 +50,12 @@
|
||||
"dependencies": {
|
||||
"@chainsafe/bls-keygen": "^0.4.0",
|
||||
"@chainsafe/blst": "^2.2.0",
|
||||
"@lodestar/api": "^1.37.0",
|
||||
"@lodestar/config": "^1.37.0",
|
||||
"@lodestar/params": "^1.37.0",
|
||||
"@lodestar/state-transition": "^1.37.0",
|
||||
"@lodestar/types": "^1.37.0",
|
||||
"@lodestar/utils": "^1.37.0",
|
||||
"@lodestar/api": "^1.38.0",
|
||||
"@lodestar/config": "^1.38.0",
|
||||
"@lodestar/params": "^1.38.0",
|
||||
"@lodestar/state-transition": "^1.38.0",
|
||||
"@lodestar/types": "^1.38.0",
|
||||
"@lodestar/utils": "^1.38.0",
|
||||
"source-map-support": "^0.5.21",
|
||||
"yargs": "^17.7.1"
|
||||
},
|
||||
|
||||
@@ -19,7 +19,7 @@ export const selfSlashAttester: CliCommand<SelfSlashArgs, Record<never, never>,
|
||||
describe: "Self slash validators of a provided mnemonic with AttesterSlashing",
|
||||
examples: [
|
||||
{
|
||||
command: "self-slash-proposer --network holesky",
|
||||
command: "self-slash-attester --network hoodi",
|
||||
description: "Self slash validators of a provided mnemonic",
|
||||
},
|
||||
],
|
||||
|
||||
@@ -19,7 +19,7 @@ export const selfSlashProposer: CliCommand<SelfSlashArgs, Record<never, never>,
|
||||
describe: "Self slash validators of a provided mnemonic with ProposerSlashing",
|
||||
examples: [
|
||||
{
|
||||
command: "self-slash-proposer --network holesky",
|
||||
command: "self-slash-proposer --network hoodi",
|
||||
description: "Self slash validators of a provided mnemonic",
|
||||
},
|
||||
],
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"bugs": {
|
||||
"url": "https://github.com/ChainSafe/lodestar/issues"
|
||||
},
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
@@ -40,11 +40,11 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@chainsafe/ssz": "^1.2.2",
|
||||
"@lodestar/config": "^1.37.0",
|
||||
"@lodestar/params": "^1.37.0",
|
||||
"@lodestar/state-transition": "^1.37.0",
|
||||
"@lodestar/types": "^1.37.0",
|
||||
"@lodestar/utils": "^1.37.0"
|
||||
"@lodestar/config": "^1.38.0",
|
||||
"@lodestar/params": "^1.38.0",
|
||||
"@lodestar/state-transition": "^1.38.0",
|
||||
"@lodestar/types": "^1.38.0",
|
||||
"@lodestar/utils": "^1.38.0"
|
||||
},
|
||||
"keywords": [
|
||||
"ethereum",
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import {beforeAll, bench, describe, setBenchOpts} from "@chainsafe/benchmark";
|
||||
import {beforeAll, bench, describe} from "@chainsafe/benchmark";
|
||||
import {EffectiveBalanceIncrements, getEffectiveBalanceIncrementsZeroed} from "@lodestar/state-transition";
|
||||
import {computeDeltas} from "../../../src/protoArray/computeDeltas.js";
|
||||
import {NULL_VOTE_INDEX} from "../../../src/protoArray/interface.js";
|
||||
@@ -26,11 +26,6 @@ describe("computeDeltas", () => {
|
||||
2 * 60 * 1000
|
||||
);
|
||||
|
||||
setBenchOpts({
|
||||
minMs: 10 * 1000,
|
||||
maxMs: 10 * 1000,
|
||||
});
|
||||
|
||||
for (const inainactiveValidatorsPercentage of inactiveValidatorsPercentages) {
|
||||
if (inainactiveValidatorsPercentage < 0 || inainactiveValidatorsPercentage > 1) {
|
||||
throw new Error("inactiveValidatorsPercentage must be between 0 and 1");
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"bugs": {
|
||||
"url": "https://github.com/ChainSafe/lodestar/issues"
|
||||
},
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
@@ -67,11 +67,11 @@
|
||||
"@chainsafe/blst": "^2.2.0",
|
||||
"@chainsafe/persistent-merkle-tree": "^1.2.1",
|
||||
"@chainsafe/ssz": "^1.2.2",
|
||||
"@lodestar/api": "^1.37.0",
|
||||
"@lodestar/config": "^1.37.0",
|
||||
"@lodestar/params": "^1.37.0",
|
||||
"@lodestar/types": "^1.37.0",
|
||||
"@lodestar/utils": "^1.37.0",
|
||||
"@lodestar/api": "^1.38.0",
|
||||
"@lodestar/config": "^1.38.0",
|
||||
"@lodestar/params": "^1.38.0",
|
||||
"@lodestar/types": "^1.38.0",
|
||||
"@lodestar/utils": "^1.38.0",
|
||||
"mitt": "^3.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"bugs": {
|
||||
"url": "https://github.com/ChainSafe/lodestar/issues"
|
||||
},
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
@@ -62,14 +62,14 @@
|
||||
},
|
||||
"types": "lib/index.d.ts",
|
||||
"dependencies": {
|
||||
"@lodestar/utils": "^1.37.0",
|
||||
"@lodestar/utils": "^1.38.0",
|
||||
"winston": "^3.8.2",
|
||||
"winston-daily-rotate-file": "^4.7.1",
|
||||
"winston-transport": "^4.5.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@chainsafe/threads": "^1.11.3",
|
||||
"@lodestar/test-utils": "^1.37.0",
|
||||
"@lodestar/test-utils": "^1.38.0",
|
||||
"@types/triple-beam": "^1.3.2",
|
||||
"triple-beam": "^1.3.0"
|
||||
},
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
[](https://github.com/ethereum/consensus-specs/releases/tag/v1.5.0)
|
||||

|
||||

|
||||

|
||||
|
||||
> This package is part of [ChainSafe's Lodestar](https://lodestar.chainsafe.io) project
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lodestar/params",
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"description": "Chain parameters required for lodestar",
|
||||
"author": "ChainSafe Systems",
|
||||
"license": "Apache-2.0",
|
||||
|
||||
@@ -8,7 +8,7 @@ import {loadConfigYaml} from "../yaml.js";
|
||||
// Not e2e, but slow. Run with e2e tests
|
||||
|
||||
/** https://github.com/ethereum/consensus-specs/releases */
|
||||
const specConfigCommit = "v1.6.0-beta.2";
|
||||
const specConfigCommit = "v1.6.1";
|
||||
/**
|
||||
* Fields that we filter from local config when doing comparison.
|
||||
* Ideally this should be empty as it is not spec compliant
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
[](https://discord.gg/aMxzVcr)
|
||||
[](https://github.com/ethereum/beacon-APIs/releases/tag/v3.1.0)
|
||||

|
||||

|
||||

|
||||
|
||||
> This package is part of [ChainSafe's Lodestar](https://lodestar.chainsafe.io) project
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"bugs": {
|
||||
"url": "https://github.com/ChainSafe/lodestar/issues"
|
||||
},
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
@@ -59,13 +59,13 @@
|
||||
"@ethereumjs/tx": "^4.1.2",
|
||||
"@ethereumjs/util": "^8.0.6",
|
||||
"@ethereumjs/vm": "^6.4.2",
|
||||
"@lodestar/api": "^1.37.0",
|
||||
"@lodestar/config": "^1.37.0",
|
||||
"@lodestar/light-client": "^1.37.0",
|
||||
"@lodestar/logger": "^1.37.0",
|
||||
"@lodestar/params": "^1.37.0",
|
||||
"@lodestar/types": "^1.37.0",
|
||||
"@lodestar/utils": "^1.37.0",
|
||||
"@lodestar/api": "^1.38.0",
|
||||
"@lodestar/config": "^1.38.0",
|
||||
"@lodestar/light-client": "^1.38.0",
|
||||
"@lodestar/logger": "^1.38.0",
|
||||
"@lodestar/params": "^1.38.0",
|
||||
"@lodestar/types": "^1.38.0",
|
||||
"@lodestar/utils": "^1.38.0",
|
||||
"ethereum-cryptography": "^2.0.0",
|
||||
"find-up": "^6.3.0",
|
||||
"http-proxy": "^1.18.1",
|
||||
@@ -74,7 +74,7 @@
|
||||
"yargs": "^17.7.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@lodestar/test-utils": "^1.37.0",
|
||||
"@lodestar/test-utils": "^1.38.0",
|
||||
"@types/http-proxy": "^1.17.10",
|
||||
"@types/yargs": "^17.0.24",
|
||||
"axios": "^1.3.4",
|
||||
|
||||
@@ -49,7 +49,7 @@ export function getChainCommon(network: string): Common {
|
||||
switch (network) {
|
||||
case "mainnet":
|
||||
case "sepolia":
|
||||
case "holesky":
|
||||
case "hoodi":
|
||||
case "ephemery":
|
||||
// TODO: Not sure how to detect the fork during runtime
|
||||
return new Common({chain: network, hardfork: Hardfork.Shanghai});
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
[](https://discord.gg/aMxzVcr)
|
||||
[](https://github.com/ethereum/beacon-APIs/releases/tag/v3.1.0)
|
||||

|
||||

|
||||

|
||||
|
||||
> This package is part of [ChainSafe's Lodestar](https://lodestar.chainsafe.io) project
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"bugs": {
|
||||
"url": "https://github.com/ChainSafe/lodestar/issues"
|
||||
},
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
@@ -46,9 +46,9 @@
|
||||
"dependencies": {
|
||||
"@chainsafe/fast-crc32c": "^4.2.0",
|
||||
"@libp2p/interface": "^2.7.0",
|
||||
"@lodestar/config": "^1.37.0",
|
||||
"@lodestar/params": "^1.37.0",
|
||||
"@lodestar/utils": "^1.37.0",
|
||||
"@lodestar/config": "^1.38.0",
|
||||
"@lodestar/params": "^1.38.0",
|
||||
"@lodestar/utils": "^1.38.0",
|
||||
"it-all": "^3.0.4",
|
||||
"it-pipe": "^3.0.1",
|
||||
"snappy": "^7.2.2",
|
||||
@@ -57,8 +57,8 @@
|
||||
"uint8arraylist": "^2.4.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@lodestar/logger": "^1.37.0",
|
||||
"@lodestar/types": "^1.37.0",
|
||||
"@lodestar/logger": "^1.38.0",
|
||||
"@lodestar/types": "^1.38.0",
|
||||
"libp2p": "2.9.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@lodestar/spec-test-util",
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"description": "Spec test suite generator from yaml test files",
|
||||
"author": "ChainSafe Systems",
|
||||
"license": "Apache-2.0",
|
||||
@@ -54,7 +54,7 @@
|
||||
"blockchain"
|
||||
],
|
||||
"dependencies": {
|
||||
"@lodestar/utils": "^1.37.0",
|
||||
"@lodestar/utils": "^1.38.0",
|
||||
"rimraf": "^4.4.1",
|
||||
"snappyjs": "^0.7.0",
|
||||
"vitest": "^4.0.7"
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"bugs": {
|
||||
"url": "https://github.com/ChainSafe/lodestar/issues"
|
||||
},
|
||||
"version": "1.37.0",
|
||||
"version": "1.38.0",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
@@ -62,10 +62,10 @@
|
||||
"@chainsafe/pubkey-index-map": "^3.0.0",
|
||||
"@chainsafe/ssz": "^1.2.2",
|
||||
"@chainsafe/swap-or-not-shuffle": "^1.2.1",
|
||||
"@lodestar/config": "^1.37.0",
|
||||
"@lodestar/params": "^1.37.0",
|
||||
"@lodestar/types": "^1.37.0",
|
||||
"@lodestar/utils": "^1.37.0",
|
||||
"@lodestar/config": "^1.38.0",
|
||||
"@lodestar/params": "^1.38.0",
|
||||
"@lodestar/types": "^1.38.0",
|
||||
"@lodestar/utils": "^1.38.0",
|
||||
"bigint-buffer": "^1.1.5"
|
||||
},
|
||||
"keywords": [
|
||||
|
||||
@@ -1,14 +1,22 @@
|
||||
import {ForkSeq} from "@lodestar/params";
|
||||
import {ForkPostGloas, ForkSeq} from "@lodestar/params";
|
||||
import {BeaconBlock, BlindedBeaconBlock, altair, capella} from "@lodestar/types";
|
||||
import {BeaconStateTransitionMetrics} from "../metrics.js";
|
||||
import {CachedBeaconStateAllForks, CachedBeaconStateBellatrix, CachedBeaconStateCapella} from "../types.js";
|
||||
import {
|
||||
CachedBeaconStateAllForks,
|
||||
CachedBeaconStateBellatrix,
|
||||
CachedBeaconStateCapella,
|
||||
CachedBeaconStateGloas,
|
||||
} from "../types.js";
|
||||
import {getFullOrBlindedPayload, isExecutionEnabled} from "../util/execution.js";
|
||||
import {BlockExternalData, DataAvailabilityStatus} from "./externalData.js";
|
||||
import {processBlobKzgCommitments} from "./processBlobKzgCommitments.js";
|
||||
import {processBlockHeader} from "./processBlockHeader.js";
|
||||
import {processEth1Data} from "./processEth1Data.js";
|
||||
import {processExecutionPayload} from "./processExecutionPayload.js";
|
||||
import {processExecutionPayloadBid} from "./processExecutionPayloadBid.ts";
|
||||
import {processExecutionPayloadEnvelope} from "./processExecutionPayloadEnvelope.ts";
|
||||
import {processOperations} from "./processOperations.js";
|
||||
import {processPayloadAttestation} from "./processPayloadAttestation.ts";
|
||||
import {processRandao} from "./processRandao.js";
|
||||
import {processSyncAggregate} from "./processSyncCommittee.js";
|
||||
import {processWithdrawals} from "./processWithdrawals.js";
|
||||
@@ -22,6 +30,9 @@ export {
|
||||
processEth1Data,
|
||||
processSyncAggregate,
|
||||
processWithdrawals,
|
||||
processExecutionPayloadBid,
|
||||
processPayloadAttestation,
|
||||
processExecutionPayloadEnvelope,
|
||||
};
|
||||
|
||||
export * from "./externalData.js";
|
||||
@@ -41,23 +52,33 @@ export function processBlock(
|
||||
|
||||
processBlockHeader(state, block);
|
||||
|
||||
if (fork >= ForkSeq.gloas) {
|
||||
// After gloas, processWithdrawals does not take a payload parameter
|
||||
processWithdrawals(fork, state as CachedBeaconStateGloas);
|
||||
} else if (fork >= ForkSeq.capella) {
|
||||
const fullOrBlindedPayload = getFullOrBlindedPayload(block);
|
||||
processWithdrawals(
|
||||
fork,
|
||||
state as CachedBeaconStateCapella,
|
||||
fullOrBlindedPayload as capella.FullOrBlindedExecutionPayload
|
||||
);
|
||||
}
|
||||
|
||||
// The call to the process_execution_payload must happen before the call to the process_randao as the former depends
|
||||
// on the randao_mix computed with the reveal of the previous block.
|
||||
if (fork >= ForkSeq.bellatrix && isExecutionEnabled(state as CachedBeaconStateBellatrix, block)) {
|
||||
const fullOrBlindedPayload = getFullOrBlindedPayload(block);
|
||||
// TODO Deneb: Allow to disable withdrawals for interop testing
|
||||
// https://github.com/ethereum/consensus-specs/blob/b62c9e877990242d63aa17a2a59a49bc649a2f2e/specs/eip4844/beacon-chain.md#disabling-withdrawals
|
||||
if (fork >= ForkSeq.capella) {
|
||||
processWithdrawals(
|
||||
fork,
|
||||
state as CachedBeaconStateCapella,
|
||||
fullOrBlindedPayload as capella.FullOrBlindedExecutionPayload
|
||||
);
|
||||
}
|
||||
|
||||
// TODO GLOAS: We call processExecutionPayload somewhere else post-gloas
|
||||
if (
|
||||
fork >= ForkSeq.bellatrix &&
|
||||
fork < ForkSeq.gloas &&
|
||||
isExecutionEnabled(state as CachedBeaconStateBellatrix, block)
|
||||
) {
|
||||
processExecutionPayload(fork, state as CachedBeaconStateBellatrix, block.body, externalData);
|
||||
}
|
||||
|
||||
if (fork >= ForkSeq.gloas) {
|
||||
processExecutionPayloadBid(state as CachedBeaconStateGloas, block as BeaconBlock<ForkPostGloas>);
|
||||
}
|
||||
|
||||
processRandao(state, block, verifySignatures);
|
||||
processEth1Data(state, block.body.eth1Data);
|
||||
processOperations(fork, state, block.body, opts, metrics);
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
import {gloas} from "@lodestar/types";
|
||||
import {getIndexedPayloadAttestationSignatureSet} from "../signatureSets/index.ts";
|
||||
import {CachedBeaconStateGloas} from "../types.js";
|
||||
import {verifySignatureSet} from "../util/index.ts";
|
||||
|
||||
export function isValidIndexedPayloadAttestation(
|
||||
state: CachedBeaconStateGloas,
|
||||
indexedPayloadAttestation: gloas.IndexedPayloadAttestation,
|
||||
verifySignature: boolean
|
||||
): boolean {
|
||||
const indices = indexedPayloadAttestation.attestingIndices;
|
||||
const isSorted = indices.every((val, i, arr) => i === 0 || arr[i - 1] <= val);
|
||||
|
||||
if (indices.length === 0 || !isSorted) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (verifySignature) {
|
||||
return verifySignatureSet(getIndexedPayloadAttestationSignatureSet(state, indexedPayloadAttestation));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
@@ -86,7 +86,11 @@ export function validateAttestation(fork: ForkSeq, state: CachedBeaconStateAllFo
|
||||
}
|
||||
|
||||
if (fork >= ForkSeq.electra) {
|
||||
assert.equal(data.index, 0, `AttestationData.index must be zero: index=${data.index}`);
|
||||
if (fork >= ForkSeq.gloas) {
|
||||
assert.lt(data.index, 2, `AttestationData.index must be 0 or 1: index=${data.index}`);
|
||||
} else {
|
||||
assert.equal(data.index, 0, `AttestationData.index must be 0: index=${data.index}`);
|
||||
}
|
||||
const attestationElectra = attestation as electra.Attestation;
|
||||
const committeeIndices = attestationElectra.committeeBits.getTrueBitIndexes();
|
||||
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import {byteArrayEquals} from "@chainsafe/ssz";
|
||||
import {
|
||||
EFFECTIVE_BALANCE_INCREMENT,
|
||||
ForkSeq,
|
||||
MIN_ATTESTATION_INCLUSION_DELAY,
|
||||
PROPOSER_WEIGHT,
|
||||
SLOTS_PER_EPOCH,
|
||||
SLOTS_PER_HISTORICAL_ROOT,
|
||||
TIMELY_HEAD_FLAG_INDEX,
|
||||
TIMELY_HEAD_WEIGHT,
|
||||
TIMELY_SOURCE_FLAG_INDEX,
|
||||
@@ -16,7 +18,8 @@ import {Attestation, Epoch, phase0} from "@lodestar/types";
|
||||
import {intSqrt} from "@lodestar/utils";
|
||||
import {BeaconStateTransitionMetrics} from "../metrics.js";
|
||||
import {getAttestationWithIndicesSignatureSet} from "../signatureSets/indexedAttestation.js";
|
||||
import {CachedBeaconStateAltair} from "../types.js";
|
||||
import {CachedBeaconStateAltair, CachedBeaconStateGloas} from "../types.js";
|
||||
import {isAttestationSameSlot, isAttestationSameSlotRootCache} from "../util/gloas.ts";
|
||||
import {increaseBalance, verifySignatureSet} from "../util/index.js";
|
||||
import {RootCache} from "../util/rootCache.js";
|
||||
import {checkpointToStr, isTimelyTarget, validateAttestation} from "./processAttestationPhase0.js";
|
||||
@@ -31,7 +34,7 @@ const SLOTS_PER_EPOCH_SQRT = intSqrt(SLOTS_PER_EPOCH);
|
||||
|
||||
export function processAttestationsAltair(
|
||||
fork: ForkSeq,
|
||||
state: CachedBeaconStateAltair,
|
||||
state: CachedBeaconStateAltair | CachedBeaconStateGloas,
|
||||
attestations: Attestation[],
|
||||
verifySignature = true,
|
||||
metrics?: BeaconStateTransitionMetrics | null
|
||||
@@ -46,6 +49,9 @@ export function processAttestationsAltair(
|
||||
let proposerReward = 0;
|
||||
let newSeenAttesters = 0;
|
||||
let newSeenAttestersEffectiveBalance = 0;
|
||||
|
||||
const builderWeightMap: Map<number, number> = new Map();
|
||||
|
||||
for (const attestation of attestations) {
|
||||
const data = attestation.data;
|
||||
|
||||
@@ -66,13 +72,16 @@ export function processAttestationsAltair(
|
||||
|
||||
const inCurrentEpoch = data.target.epoch === currentEpoch;
|
||||
const epochParticipation = inCurrentEpoch ? state.currentEpochParticipation : state.previousEpochParticipation;
|
||||
// Count how much additional weight added to current or previous epoch's builder pending payment (in ETH increment)
|
||||
let paymentWeightToAdd = 0;
|
||||
|
||||
const flagsAttestation = getAttestationParticipationStatus(
|
||||
fork,
|
||||
data,
|
||||
stateSlot - data.slot,
|
||||
epochCtx.epoch,
|
||||
rootCache
|
||||
rootCache,
|
||||
fork >= ForkSeq.gloas ? (state as CachedBeaconStateGloas).executionPayloadAvailability.toBoolArray() : null
|
||||
);
|
||||
|
||||
// For each participant, update their participation
|
||||
@@ -121,12 +130,35 @@ export function processAttestationsAltair(
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (fork >= ForkSeq.gloas && flagsNewSet !== 0 && isAttestationSameSlot(state as CachedBeaconStateGloas, data)) {
|
||||
paymentWeightToAdd += effectiveBalanceIncrements[validatorIndex];
|
||||
}
|
||||
}
|
||||
|
||||
// Do the discrete math inside the loop to ensure a deterministic result
|
||||
const totalIncrements = totalBalanceIncrementsWithWeight;
|
||||
const proposerRewardNumerator = totalIncrements * state.epochCtx.baseRewardPerIncrement;
|
||||
proposerReward += Math.floor(proposerRewardNumerator / PROPOSER_REWARD_DOMINATOR);
|
||||
|
||||
if (fork >= ForkSeq.gloas) {
|
||||
const builderPendingPaymentIndex = inCurrentEpoch
|
||||
? SLOTS_PER_EPOCH + (data.slot % SLOTS_PER_EPOCH)
|
||||
: data.slot % SLOTS_PER_EPOCH;
|
||||
|
||||
const existingWeight =
|
||||
builderWeightMap.get(builderPendingPaymentIndex) ??
|
||||
(state as CachedBeaconStateGloas).builderPendingPayments.get(builderPendingPaymentIndex).weight;
|
||||
const updatedWeight = existingWeight + paymentWeightToAdd * EFFECTIVE_BALANCE_INCREMENT;
|
||||
builderWeightMap.set(builderPendingPaymentIndex, updatedWeight);
|
||||
}
|
||||
}
|
||||
|
||||
for (const [index, weight] of builderWeightMap) {
|
||||
const payment = (state as CachedBeaconStateGloas).builderPendingPayments.get(index);
|
||||
if (payment.withdrawal.amount > 0) {
|
||||
payment.weight = weight;
|
||||
}
|
||||
}
|
||||
|
||||
metrics?.newSeenAttestersPerBlock.set(newSeenAttesters);
|
||||
@@ -145,7 +177,8 @@ export function getAttestationParticipationStatus(
|
||||
data: phase0.AttestationData,
|
||||
inclusionDelay: number,
|
||||
currentEpoch: Epoch,
|
||||
rootCache: RootCache
|
||||
rootCache: RootCache,
|
||||
executionPayloadAvailability: boolean[] | null
|
||||
): number {
|
||||
const justifiedCheckpoint =
|
||||
data.target.epoch === currentEpoch ? rootCache.currentJustifiedCheckpoint : rootCache.previousJustifiedCheckpoint;
|
||||
@@ -168,9 +201,33 @@ export function getAttestationParticipationStatus(
|
||||
const isMatchingTarget = byteArrayEquals(data.target.root, rootCache.getBlockRoot(data.target.epoch));
|
||||
|
||||
// a timely head is only be set if the target is _also_ matching
|
||||
const isMatchingHead =
|
||||
// In gloas, this is called `head_root_matches`
|
||||
let isMatchingHead =
|
||||
isMatchingTarget && byteArrayEquals(data.beaconBlockRoot, rootCache.getBlockRootAtSlot(data.slot));
|
||||
|
||||
if (fork >= ForkSeq.gloas) {
|
||||
let isMatchingPayload = false;
|
||||
|
||||
if (isAttestationSameSlotRootCache(rootCache, data)) {
|
||||
if (data.index !== 0) {
|
||||
throw new Error("Attesting same slot must indicate empty payload");
|
||||
}
|
||||
isMatchingPayload = true;
|
||||
} else {
|
||||
if (executionPayloadAvailability === null) {
|
||||
throw new Error("Must supply executionPayloadAvailability post-gloas");
|
||||
}
|
||||
|
||||
if (data.index !== 0 && data.index !== 1) {
|
||||
throw new Error(`data index must be 0 or 1 index=${data.index}`);
|
||||
}
|
||||
|
||||
isMatchingPayload = Boolean(data.index) === executionPayloadAvailability[data.slot % SLOTS_PER_HISTORICAL_ROOT];
|
||||
}
|
||||
|
||||
isMatchingHead = isMatchingHead && isMatchingPayload;
|
||||
}
|
||||
|
||||
let flags = 0;
|
||||
if (isMatchingSource && inclusionDelay <= SLOTS_PER_EPOCH_SQRT) flags |= TIMELY_SOURCE;
|
||||
if (isMatchingTarget && isTimelyTarget(fork, inclusionDelay)) flags |= TIMELY_TARGET;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import {FAR_FUTURE_EPOCH, MIN_ACTIVATION_BALANCE, PENDING_CONSOLIDATIONS_LIMIT} from "@lodestar/params";
|
||||
import {FAR_FUTURE_EPOCH, ForkSeq, MIN_ACTIVATION_BALANCE, PENDING_CONSOLIDATIONS_LIMIT} from "@lodestar/params";
|
||||
import {electra, ssz} from "@lodestar/types";
|
||||
import {CachedBeaconStateElectra} from "../types.js";
|
||||
import {CachedBeaconStateElectra, CachedBeaconStateGloas} from "../types.js";
|
||||
import {hasEth1WithdrawalCredential} from "../util/capella.js";
|
||||
import {
|
||||
hasCompoundingWithdrawalCredential,
|
||||
@@ -13,7 +13,8 @@ import {getConsolidationChurnLimit, getPendingBalanceToWithdraw, isActiveValidat
|
||||
|
||||
// TODO Electra: Clean up necessary as there is a lot of overlap with isValidSwitchToCompoundRequest
|
||||
export function processConsolidationRequest(
|
||||
state: CachedBeaconStateElectra,
|
||||
fork: ForkSeq,
|
||||
state: CachedBeaconStateElectra | CachedBeaconStateGloas,
|
||||
consolidationRequest: electra.ConsolidationRequest
|
||||
): void {
|
||||
const {sourcePubkey, targetPubkey, sourceAddress} = consolidationRequest;
|
||||
@@ -82,7 +83,7 @@ export function processConsolidationRequest(
|
||||
}
|
||||
|
||||
// Verify the source has no pending withdrawals in the queue
|
||||
if (getPendingBalanceToWithdraw(state, sourceIndex) > 0) {
|
||||
if (getPendingBalanceToWithdraw(fork, state, sourceIndex) > 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -103,7 +104,7 @@ export function processConsolidationRequest(
|
||||
* Determine if we should set consolidation target validator to compounding credential
|
||||
*/
|
||||
function isValidSwitchToCompoundRequest(
|
||||
state: CachedBeaconStateElectra,
|
||||
state: CachedBeaconStateElectra | CachedBeaconStateGloas,
|
||||
consolidationRequest: electra.ConsolidationRequest
|
||||
): boolean {
|
||||
const {sourcePubkey, targetPubkey, sourceAddress} = consolidationRequest;
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
import {UNSET_DEPOSIT_REQUESTS_START_INDEX} from "@lodestar/params";
|
||||
import {electra, ssz} from "@lodestar/types";
|
||||
import {CachedBeaconStateElectra} from "../types.js";
|
||||
import {CachedBeaconStateElectra, CachedBeaconStateGloas} from "../types.js";
|
||||
|
||||
export function processDepositRequest(state: CachedBeaconStateElectra, depositRequest: electra.DepositRequest): void {
|
||||
export function processDepositRequest(
|
||||
state: CachedBeaconStateElectra | CachedBeaconStateGloas,
|
||||
depositRequest: electra.DepositRequest
|
||||
): void {
|
||||
if (state.depositRequestsStartIndex === UNSET_DEPOSIT_REQUESTS_START_INDEX) {
|
||||
state.depositRequestsStartIndex = depositRequest.index;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,120 @@
|
||||
import {PublicKey, Signature, verify} from "@chainsafe/blst";
|
||||
import {byteArrayEquals} from "@chainsafe/ssz";
|
||||
import {
|
||||
DOMAIN_BEACON_BUILDER,
|
||||
FAR_FUTURE_EPOCH,
|
||||
ForkPostGloas,
|
||||
MIN_ACTIVATION_BALANCE,
|
||||
SLOTS_PER_EPOCH,
|
||||
} from "@lodestar/params";
|
||||
import {BeaconBlock, gloas, ssz} from "@lodestar/types";
|
||||
import {toHex, toRootHex} from "@lodestar/utils";
|
||||
import {G2_POINT_AT_INFINITY} from "../constants/constants.ts";
|
||||
import {CachedBeaconStateGloas} from "../types.ts";
|
||||
import {hasBuilderWithdrawalCredential} from "../util/gloas.ts";
|
||||
import {computeSigningRoot, getCurrentEpoch, getRandaoMix, isActiveValidator} from "../util/index.ts";
|
||||
|
||||
export function processExecutionPayloadBid(state: CachedBeaconStateGloas, block: BeaconBlock<ForkPostGloas>): void {
|
||||
const signedBid = block.body.signedExecutionPayloadBid;
|
||||
const bid = signedBid.message;
|
||||
const {builderIndex, value: amount} = bid;
|
||||
const builder = state.validators.getReadonly(builderIndex);
|
||||
|
||||
// For self-builds, amount must be zero regardless of withdrawal credential prefix
|
||||
if (builderIndex === block.proposerIndex) {
|
||||
if (amount !== 0) {
|
||||
throw Error(`Invalid execution payload bid: self-build with non-zero amount ${amount}`);
|
||||
}
|
||||
if (!byteArrayEquals(signedBid.signature, G2_POINT_AT_INFINITY)) {
|
||||
throw Error("Invalid execution payload bid: self-build with non-zero signature");
|
||||
}
|
||||
// Non-self builds require builder withdrawal credential
|
||||
} else {
|
||||
if (!hasBuilderWithdrawalCredential(builder.withdrawalCredentials)) {
|
||||
throw Error(`Invalid execution payload bid: builder ${builderIndex} does not have builder withdrawal credential`);
|
||||
}
|
||||
|
||||
if (!verifyExecutionPayloadBidSignature(state, builder.pubkey, signedBid)) {
|
||||
throw Error(`Invalid execution payload bid: invalid signature for builder ${builderIndex}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (!isActiveValidator(builder, getCurrentEpoch(state))) {
|
||||
throw Error(`Invalid execution payload bid: builder ${builderIndex} is not active`);
|
||||
}
|
||||
|
||||
if (builder.slashed) {
|
||||
throw Error(`Invalid execution payload bid: builder ${builderIndex} is slashed`);
|
||||
}
|
||||
|
||||
const pendingPayments = state.builderPendingPayments
|
||||
.getAllReadonly()
|
||||
.filter((payment) => payment.withdrawal.builderIndex === builderIndex)
|
||||
.reduce((acc, payment) => acc + payment.withdrawal.amount, 0);
|
||||
const pendingWithdrawals = state.builderPendingWithdrawals
|
||||
.getAllReadonly()
|
||||
.filter((withdrawal) => withdrawal.builderIndex === builderIndex)
|
||||
.reduce((acc, withdrawal) => acc + withdrawal.amount, 0);
|
||||
|
||||
if (
|
||||
amount !== 0 &&
|
||||
state.balances.get(builderIndex) < amount + pendingPayments + pendingWithdrawals + MIN_ACTIVATION_BALANCE
|
||||
) {
|
||||
throw Error("Insufficient builder balance");
|
||||
}
|
||||
|
||||
if (bid.slot !== block.slot) {
|
||||
throw Error(`Bid slot ${bid.slot} does not match block slot ${block.slot}`);
|
||||
}
|
||||
|
||||
if (!byteArrayEquals(bid.parentBlockHash, state.latestBlockHash)) {
|
||||
throw Error(
|
||||
`Parent block hash ${toRootHex(bid.parentBlockHash)} of bid does not match state's latest block hash ${toRootHex(state.latestBlockHash)}`
|
||||
);
|
||||
}
|
||||
|
||||
if (!byteArrayEquals(bid.parentBlockRoot, block.parentRoot)) {
|
||||
throw Error(
|
||||
`Parent block root ${toRootHex(bid.parentBlockRoot)} of bid does not match block's parent root ${toRootHex(block.parentRoot)}`
|
||||
);
|
||||
}
|
||||
|
||||
const stateRandao = getRandaoMix(state, getCurrentEpoch(state));
|
||||
if (!byteArrayEquals(bid.prevRandao, stateRandao)) {
|
||||
throw Error(`Prev randao ${toHex(bid.prevRandao)} of bid does not match state's randao mix ${toHex(stateRandao)}`);
|
||||
}
|
||||
|
||||
if (amount > 0) {
|
||||
const pendingPaymentView = ssz.gloas.BuilderPendingPayment.toViewDU({
|
||||
weight: 0,
|
||||
withdrawal: ssz.gloas.BuilderPendingWithdrawal.toViewDU({
|
||||
feeRecipient: bid.feeRecipient,
|
||||
amount,
|
||||
builderIndex,
|
||||
withdrawableEpoch: FAR_FUTURE_EPOCH,
|
||||
}),
|
||||
});
|
||||
|
||||
state.builderPendingPayments.set(SLOTS_PER_EPOCH + (bid.slot % SLOTS_PER_EPOCH), pendingPaymentView);
|
||||
}
|
||||
|
||||
state.latestExecutionPayloadBid = ssz.gloas.ExecutionPayloadBid.toViewDU(bid);
|
||||
}
|
||||
|
||||
function verifyExecutionPayloadBidSignature(
|
||||
state: CachedBeaconStateGloas,
|
||||
pubkey: Uint8Array,
|
||||
signedBid: gloas.SignedExecutionPayloadBid
|
||||
): boolean {
|
||||
const domain = state.config.getDomain(state.slot, DOMAIN_BEACON_BUILDER);
|
||||
const signingRoot = computeSigningRoot(ssz.gloas.ExecutionPayloadBid, signedBid.message, domain);
|
||||
|
||||
try {
|
||||
const publicKey = PublicKey.fromBytes(pubkey);
|
||||
const signature = Signature.fromBytes(signedBid.signature, true);
|
||||
|
||||
return verify(signingRoot, publicKey, signature);
|
||||
} catch (_e) {
|
||||
return false; // Catch all BLS errors: failed key validation, failed signature validation, invalid signature
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,181 @@
|
||||
import {PublicKey, Signature, verify} from "@chainsafe/blst";
|
||||
import {byteArrayEquals} from "@chainsafe/ssz";
|
||||
import {DOMAIN_BEACON_BUILDER, SLOTS_PER_EPOCH, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params";
|
||||
import {gloas, ssz} from "@lodestar/types";
|
||||
import {toHex, toRootHex} from "@lodestar/utils";
|
||||
import {CachedBeaconStateGloas} from "../types.ts";
|
||||
import {computeExitEpochAndUpdateChurn, computeSigningRoot, computeTimeAtSlot} from "../util/index.ts";
|
||||
import {processConsolidationRequest} from "./processConsolidationRequest.ts";
|
||||
import {processDepositRequest} from "./processDepositRequest.ts";
|
||||
import {processWithdrawalRequest} from "./processWithdrawalRequest.ts";
|
||||
|
||||
// This function does not call execution engine to verify payload. Need to call it from other place
|
||||
export function processExecutionPayloadEnvelope(
|
||||
state: CachedBeaconStateGloas,
|
||||
signedEnvelope: gloas.SignedExecutionPayloadEnvelope,
|
||||
verify: boolean
|
||||
): void {
|
||||
const envelope = signedEnvelope.message;
|
||||
const payload = envelope.payload;
|
||||
const fork = state.config.getForkSeq(envelope.slot);
|
||||
|
||||
if (verify) {
|
||||
const builderIndex = envelope.builderIndex;
|
||||
const pubkey = state.validators.getReadonly(builderIndex).pubkey;
|
||||
|
||||
if (!verifyExecutionPayloadEnvelopeSignature(state, pubkey, signedEnvelope)) {
|
||||
throw new Error("Payload Envelope has invalid signature");
|
||||
}
|
||||
}
|
||||
|
||||
validateExecutionPayloadEnvelope(state, envelope);
|
||||
|
||||
const requests = envelope.executionRequests;
|
||||
|
||||
for (const deposit of requests.deposits) {
|
||||
    processDepositRequest(state, deposit);
  }

  for (const withdrawal of requests.withdrawals) {
    processWithdrawalRequest(fork, state, withdrawal);
  }

  for (const consolidation of requests.consolidations) {
    processConsolidationRequest(fork, state, consolidation);
  }

  // Queue the builder payment
  const paymentIndex = SLOTS_PER_EPOCH + (state.slot % SLOTS_PER_EPOCH);
  const payment = state.builderPendingPayments.get(paymentIndex).clone();
  const amount = payment.withdrawal.amount;

  if (amount > 0) {
    const exitQueueEpoch = computeExitEpochAndUpdateChurn(state, BigInt(amount));

    payment.withdrawal.withdrawableEpoch = exitQueueEpoch + state.config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY;
    state.builderPendingWithdrawals.push(payment.withdrawal);
  }

  state.builderPendingPayments.set(paymentIndex, ssz.gloas.BuilderPendingPayment.defaultViewDU());

  // Cache the execution payload hash
  state.executionPayloadAvailability.set(state.slot % SLOTS_PER_HISTORICAL_ROOT, true);
  state.latestBlockHash = payload.blockHash;

  if (verify && !byteArrayEquals(envelope.stateRoot, state.hashTreeRoot())) {
    throw new Error(
      `Envelope's state root does not match state envelope=${toRootHex(envelope.stateRoot)} state=${toRootHex(state.hashTreeRoot())}`
    );
  }
}

function validateExecutionPayloadEnvelope(
  state: CachedBeaconStateGloas,
  envelope: gloas.ExecutionPayloadEnvelope
): void {
  const payload = envelope.payload;

  if (byteArrayEquals(state.latestBlockHeader.stateRoot, ssz.Root.defaultValue())) {
    const previousStateRoot = state.hashTreeRoot();
    state.latestBlockHeader.stateRoot = previousStateRoot;
  }

  // Verify consistency with the beacon block
  if (!byteArrayEquals(envelope.beaconBlockRoot, state.latestBlockHeader.hashTreeRoot())) {
    throw new Error(
      `Envelope's block is not the latest block header envelope=${toRootHex(envelope.beaconBlockRoot)} latestBlockHeader=${toRootHex(state.latestBlockHeader.hashTreeRoot())}`
    );
  }

  // Verify consistency with the beacon block
  if (envelope.slot !== state.slot) {
    throw new Error(`Slot mismatch between envelope and state envelope=${envelope.slot} state=${state.slot}`);
  }

  const committedBid = state.latestExecutionPayloadBid;
  // Verify consistency with the committed bid
  if (envelope.builderIndex !== committedBid.builderIndex) {
    throw new Error(
      `Builder index mismatch between envelope and committed bid envelope=${envelope.builderIndex} committedBid=${committedBid.builderIndex}`
    );
  }

  // Verify consistency with the committed bid
  const envelopeKzgRoot = ssz.deneb.BlobKzgCommitments.hashTreeRoot(envelope.blobKzgCommitments);
  if (!byteArrayEquals(committedBid.blobKzgCommitmentsRoot, envelopeKzgRoot)) {
    throw new Error(
      `Kzg commitment root mismatch between envelope and committed bid envelope=${toRootHex(envelopeKzgRoot)} committedBid=${toRootHex(committedBid.blobKzgCommitmentsRoot)}`
    );
  }

  // Verify the withdrawals root
  const envelopeWithdrawalsRoot = ssz.capella.Withdrawals.hashTreeRoot(envelope.payload.withdrawals);
  if (!byteArrayEquals(state.latestWithdrawalsRoot, envelopeWithdrawalsRoot)) {
    throw new Error(
      `Withdrawals root mismatch between envelope and latest withdrawals root envelope=${toRootHex(envelopeWithdrawalsRoot)} latestWithdrawalRoot=${toRootHex(state.latestWithdrawalsRoot)}`
    );
  }

  // Verify the gas_limit
  if (Number(committedBid.gasLimit) !== payload.gasLimit) {
    throw new Error(
      `Gas limit mismatch between envelope's payload and committed bid envelope=${payload.gasLimit} committedBid=${Number(committedBid.gasLimit)}`
    );
  }

  // Verify the block hash
  if (!byteArrayEquals(committedBid.blockHash, payload.blockHash)) {
    throw new Error(
      `Block hash mismatch between envelope's payload and committed bid envelope=${toRootHex(payload.blockHash)} committedBid=${toRootHex(committedBid.blockHash)}`
    );
  }

  // Verify consistency of the parent hash with respect to the previous execution payload
  if (!byteArrayEquals(payload.parentHash, state.latestBlockHash)) {
    throw new Error(
      `Parent hash mismatch between envelope's payload and state envelope=${toRootHex(payload.parentHash)} state=${toRootHex(state.latestBlockHash)}`
    );
  }

  // Verify prev_randao matches committed bid
  if (!byteArrayEquals(committedBid.prevRandao, payload.prevRandao)) {
    throw new Error(
      `Prev randao mismatch between committed bid and payload committedBid=${toHex(committedBid.prevRandao)} payload=${toHex(payload.prevRandao)}`
    );
  }

  // Verify timestamp
  if (payload.timestamp !== computeTimeAtSlot(state.config, state.slot, state.genesisTime)) {
    throw new Error(
      `Timestamp mismatch between envelope's payload and state envelope=${payload.timestamp} state=${computeTimeAtSlot(state.config, state.slot, state.genesisTime)}`
    );
  }

  // Verify commitments are under limit
  const maxBlobsPerBlock = state.config.getMaxBlobsPerBlock(state.epochCtx.epoch);
  if (envelope.blobKzgCommitments.length > maxBlobsPerBlock) {
    throw new Error(
      `Kzg commitments exceed limit commitment.length=${envelope.blobKzgCommitments.length} limit=${maxBlobsPerBlock}`
    );
  }

  // Skipped: Verify the execution payload is valid
}

function verifyExecutionPayloadEnvelopeSignature(
  state: CachedBeaconStateGloas,
  pubkey: Uint8Array,
  signedEnvelope: gloas.SignedExecutionPayloadEnvelope
): boolean {
  const domain = state.config.getDomain(state.slot, DOMAIN_BEACON_BUILDER);
  const signingRoot = computeSigningRoot(ssz.gloas.ExecutionPayloadEnvelope, signedEnvelope.message, domain);

  try {
    const publicKey = PublicKey.fromBytes(pubkey);
    const signature = Signature.fromBytes(signedEnvelope.signature, true);

    return verify(signingRoot, publicKey, signature);
  } catch (_e) {
    return false; // Catch all BLS errors: failed key validation, failed signature validation, invalid signature
  }
}
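For reference, a minimal sketch of a call site for the signature helper above, assuming the committed builder's BLS pubkey is read from the validator registry; the surrounding variable names are illustrative and not part of this change:

// Illustrative only: look up the committed builder's pubkey and check the envelope signature.
const builderIndex = signedEnvelope.message.builderIndex;
const builderPubkey = state.validators.getReadonly(builderIndex).pubkey;
if (!verifyExecutionPayloadEnvelopeSignature(state, builderPubkey, signedEnvelope)) {
  throw new Error(`Invalid execution payload envelope signature builderIndex=${builderIndex}`);
}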
@@ -1,7 +1,12 @@
import {ForkSeq} from "@lodestar/params";
import {BeaconBlockBody, capella, electra} from "@lodestar/types";
import {BeaconBlockBody, capella, electra, gloas} from "@lodestar/types";
import {BeaconStateTransitionMetrics} from "../metrics.js";
import {CachedBeaconStateAllForks, CachedBeaconStateCapella, CachedBeaconStateElectra} from "../types.js";
import {
  CachedBeaconStateAllForks,
  CachedBeaconStateCapella,
  CachedBeaconStateElectra,
  CachedBeaconStateGloas,
} from "../types.js";
import {getEth1DepositCount} from "../util/deposit.js";
import {processAttestations} from "./processAttestations.js";
import {processAttesterSlashing} from "./processAttesterSlashing.js";
@@ -9,6 +14,7 @@ import {processBlsToExecutionChange} from "./processBlsToExecutionChange.js";
import {processConsolidationRequest} from "./processConsolidationRequest.js";
import {processDeposit} from "./processDeposit.js";
import {processDepositRequest} from "./processDepositRequest.js";
import {processPayloadAttestation} from "./processPayloadAttestation.ts";
import {processProposerSlashing} from "./processProposerSlashing.js";
import {processVoluntaryExit} from "./processVoluntaryExit.js";
import {processWithdrawalRequest} from "./processWithdrawalRequest.js";
@@ -64,7 +70,7 @@ export function processOperations(
    }
  }

  if (fork >= ForkSeq.electra) {
  if (fork >= ForkSeq.electra && fork < ForkSeq.gloas) {
    const stateElectra = state as CachedBeaconStateElectra;
    const bodyElectra = body as electra.BeaconBlockBody;

@@ -77,7 +83,13 @@ export function processOperations(
    }

    for (const elConsolidationRequest of bodyElectra.executionRequests.consolidations) {
      processConsolidationRequest(stateElectra, elConsolidationRequest);
      processConsolidationRequest(fork, stateElectra, elConsolidationRequest);
    }
  }

  if (fork >= ForkSeq.gloas) {
    for (const payloadAttestation of (body as gloas.BeaconBlockBody).payloadAttestations) {
      processPayloadAttestation(state as CachedBeaconStateGloas, payloadAttestation);
    }
  }
}
@@ -0,0 +1,25 @@
import {byteArrayEquals} from "@chainsafe/ssz";
import {gloas} from "@lodestar/types";
import {CachedBeaconStateGloas} from "../types.ts";
import {isValidIndexedPayloadAttestation} from "./isValidIndexedPayloadAttestation.ts";

export function processPayloadAttestation(
  state: CachedBeaconStateGloas,
  payloadAttestation: gloas.PayloadAttestation
): void {
  const data = payloadAttestation.data;

  if (!byteArrayEquals(data.beaconBlockRoot, state.latestBlockHeader.parentRoot)) {
    throw Error("Payload attestation is referring to the wrong block");
  }

  if (data.slot + 1 !== state.slot) {
    throw Error("Payload attestation is not from previous slot");
  }

  const indexedPayloadAttestation = state.epochCtx.getIndexedPayloadAttestation(data.slot, payloadAttestation);

  if (!isValidIndexedPayloadAttestation(state, indexedPayloadAttestation, true)) {
    throw Error("Invalid payload attestation");
  }
}
@@ -1,8 +1,8 @@
import {ForkSeq} from "@lodestar/params";
import {ForkSeq, SLOTS_PER_EPOCH} from "@lodestar/params";
import {phase0, ssz} from "@lodestar/types";
import {getProposerSlashingSignatureSets} from "../signatureSets/index.js";
import {CachedBeaconStateAllForks} from "../types.js";
import {isSlashableValidator} from "../util/index.js";
import {CachedBeaconStateAllForks, CachedBeaconStateGloas} from "../types.js";
import {computeEpochAtSlot, isSlashableValidator} from "../util/index.js";
import {verifySignatureSet} from "../util/signatureSets.js";
import {slashValidator} from "./slashValidator.js";

@@ -20,6 +20,27 @@ export function processProposerSlashing(
): void {
  assertValidProposerSlashing(state, proposerSlashing, verifySignatures);

  if (fork >= ForkSeq.gloas) {
    const slot = Number(proposerSlashing.signedHeader1.message.slot);
    const proposalEpoch = computeEpochAtSlot(slot);
    const currentEpoch = state.epochCtx.epoch;
    const previousEpoch = currentEpoch - 1;

    const paymentIndex =
      proposalEpoch === currentEpoch
        ? SLOTS_PER_EPOCH + (slot % SLOTS_PER_EPOCH)
        : proposalEpoch === previousEpoch
          ? slot % SLOTS_PER_EPOCH
          : undefined;

    if (paymentIndex !== undefined) {
      (state as CachedBeaconStateGloas).builderPendingPayments.set(
        paymentIndex,
        ssz.gloas.BuilderPendingPayment.defaultViewDU()
      );
    }
  }

  slashValidator(fork, state, proposerSlashing.signedHeader1.message.proposerIndex);
}
@@ -69,7 +69,7 @@ export function getVoluntaryExitValidity(
  // only exit validator if it has no pending withdrawals in the queue
  if (
    fork >= ForkSeq.electra &&
    getPendingBalanceToWithdraw(state as CachedBeaconStateElectra, voluntaryExit.validatorIndex) !== 0
    getPendingBalanceToWithdraw(fork, state as CachedBeaconStateElectra, voluntaryExit.validatorIndex) !== 0
  ) {
    return VoluntaryExitValidity.pendingWithdrawals;
  }
@@ -7,7 +7,7 @@ import {
} from "@lodestar/params";
import {electra, phase0, ssz} from "@lodestar/types";
import {toHex} from "@lodestar/utils";
import {CachedBeaconStateElectra} from "../types.js";
import {CachedBeaconStateElectra, CachedBeaconStateGloas} from "../types.js";
import {hasCompoundingWithdrawalCredential, hasExecutionWithdrawalCredential} from "../util/electra.js";
import {computeExitEpochAndUpdateChurn} from "../util/epoch.js";
import {getPendingBalanceToWithdraw, isActiveValidator} from "../util/validator.js";
@@ -15,7 +15,7 @@ import {initiateValidatorExit} from "./initiateValidatorExit.js";

export function processWithdrawalRequest(
  fork: ForkSeq,
  state: CachedBeaconStateElectra,
  state: CachedBeaconStateElectra | CachedBeaconStateGloas,
  withdrawalRequest: electra.WithdrawalRequest
): void {
  const amount = Number(withdrawalRequest.amount);
@@ -42,7 +42,7 @@ export function processWithdrawalRequest(
  }

  // TODO Electra: Consider caching pendingPartialWithdrawals
  const pendingBalanceToWithdraw = getPendingBalanceToWithdraw(state, validatorIndex);
  const pendingBalanceToWithdraw = getPendingBalanceToWithdraw(fork, state, validatorIndex);
  const validatorBalance = state.balances.get(validatorIndex);

  if (isFullExitRequest) {
@@ -81,7 +81,7 @@ export function processWithdrawalRequest(
function isValidatorEligibleForWithdrawOrExit(
  validator: phase0.Validator,
  sourceAddress: Uint8Array,
  state: CachedBeaconStateElectra
  state: CachedBeaconStateElectra | CachedBeaconStateGloas
): boolean {
  const {withdrawalCredentials} = validator;
  const addressStr = toHex(withdrawalCredentials.subarray(12));
@@ -10,7 +10,8 @@ import {
} from "@lodestar/params";
import {ValidatorIndex, capella, ssz} from "@lodestar/types";
import {MapDef, toRootHex} from "@lodestar/utils";
import {CachedBeaconStateCapella, CachedBeaconStateElectra} from "../types.js";
import {CachedBeaconStateCapella, CachedBeaconStateElectra, CachedBeaconStateGloas} from "../types.js";
import {isBuilderPaymentWithdrawable, isParentBlockFull} from "../util/gloas.ts";
import {
  decreaseBalance,
  getMaxEffectiveBalance,
@@ -21,31 +22,48 @@ import {

export function processWithdrawals(
  fork: ForkSeq,
  state: CachedBeaconStateCapella | CachedBeaconStateElectra,
  payload: capella.FullOrBlindedExecutionPayload
  state: CachedBeaconStateCapella | CachedBeaconStateElectra | CachedBeaconStateGloas,
  payload?: capella.FullOrBlindedExecutionPayload
): void {
  // Return early if the parent block is empty
  if (fork >= ForkSeq.gloas && !isParentBlockFull(state as CachedBeaconStateGloas)) {
    return;
  }

  // processedPartialWithdrawalsCount is withdrawals coming from EL since electra (EIP-7002)
  const {withdrawals: expectedWithdrawals, processedPartialWithdrawalsCount} = getExpectedWithdrawals(fork, state);
  // processedBuilderWithdrawalsCount is withdrawals coming from builder payment since gloas (EIP-7732)
  const {
    withdrawals: expectedWithdrawals,
    processedPartialWithdrawalsCount,
    processedBuilderWithdrawalsCount,
  } = getExpectedWithdrawals(fork, state);
  const numWithdrawals = expectedWithdrawals.length;

  if (isCapellaPayloadHeader(payload)) {
    const expectedWithdrawalsRoot = ssz.capella.Withdrawals.hashTreeRoot(expectedWithdrawals);
    const actualWithdrawalsRoot = payload.withdrawalsRoot;
    if (!byteArrayEquals(expectedWithdrawalsRoot, actualWithdrawalsRoot)) {
      throw Error(
        `Invalid withdrawalsRoot of executionPayloadHeader, expected=${toRootHex(
          expectedWithdrawalsRoot
        )}, actual=${toRootHex(actualWithdrawalsRoot)}`
      );
  // After gloas, withdrawals are verified later in processExecutionPayloadEnvelope
  if (fork < ForkSeq.gloas) {
    if (payload === undefined) {
      throw Error("payload is required for pre-gloas processWithdrawals");
    }
  } else {
    if (expectedWithdrawals.length !== payload.withdrawals.length) {
      throw Error(`Invalid withdrawals length expected=${numWithdrawals} actual=${payload.withdrawals.length}`);
    }
    for (let i = 0; i < numWithdrawals; i++) {
      const withdrawal = expectedWithdrawals[i];
      if (!ssz.capella.Withdrawal.equals(withdrawal, payload.withdrawals[i])) {
        throw Error(`Withdrawal mismatch at index=${i}`);

    if (isCapellaPayloadHeader(payload)) {
      const expectedWithdrawalsRoot = ssz.capella.Withdrawals.hashTreeRoot(expectedWithdrawals);
      const actualWithdrawalsRoot = payload.withdrawalsRoot;
      if (!byteArrayEquals(expectedWithdrawalsRoot, actualWithdrawalsRoot)) {
        throw Error(
          `Invalid withdrawalsRoot of executionPayloadHeader, expected=${toRootHex(
            expectedWithdrawalsRoot
          )}, actual=${toRootHex(actualWithdrawalsRoot)}`
        );
      }
    } else {
      if (expectedWithdrawals.length !== payload.withdrawals.length) {
        throw Error(`Invalid withdrawals length expected=${numWithdrawals} actual=${payload.withdrawals.length}`);
      }
      for (let i = 0; i < numWithdrawals; i++) {
        const withdrawal = expectedWithdrawals[i];
        if (!ssz.capella.Withdrawal.equals(withdrawal, payload.withdrawals[i])) {
          throw Error(`Withdrawal mismatch at index=${i}`);
        }
      }
    }
  }
@@ -62,6 +80,24 @@ export function processWithdrawals(
    );
  }

  if (fork >= ForkSeq.gloas) {
    const stateGloas = state as CachedBeaconStateGloas;
    stateGloas.latestWithdrawalsRoot = ssz.capella.Withdrawals.hashTreeRoot(expectedWithdrawals);

    const unprocessedWithdrawals = stateGloas.builderPendingWithdrawals
      .getAllReadonly()
      .slice(0, processedBuilderWithdrawalsCount)
      .filter((w) => !isBuilderPaymentWithdrawable(stateGloas, w));
    const remainingWithdrawals = stateGloas.builderPendingWithdrawals
      .sliceFrom(processedBuilderWithdrawalsCount)
      .getAllReadonly();

    stateGloas.builderPendingWithdrawals = ssz.gloas.BeaconState.fields.builderPendingWithdrawals.toViewDU([
      ...unprocessedWithdrawals,
      ...remainingWithdrawals,
    ]);
  }

  // Update the nextWithdrawalIndex
  const latestWithdrawal = expectedWithdrawals.at(-1);
  if (latestWithdrawal) {
@@ -82,11 +118,12 @@

export function getExpectedWithdrawals(
  fork: ForkSeq,
  state: CachedBeaconStateCapella | CachedBeaconStateElectra
  state: CachedBeaconStateCapella | CachedBeaconStateElectra | CachedBeaconStateGloas
): {
  withdrawals: capella.Withdrawal[];
  sampledValidators: number;
  processedPartialWithdrawalsCount: number;
  processedBuilderWithdrawalsCount: number;
} {
  if (fork < ForkSeq.capella) {
    throw new Error(`getExpectedWithdrawals not supported at forkSeq=${fork} < ForkSeq.capella`);
@@ -99,17 +136,71 @@ export function getExpectedWithdrawals(
  const withdrawals: capella.Withdrawal[] = [];
  const withdrawnBalances = new MapDef<ValidatorIndex, number>(() => 0);
  const isPostElectra = fork >= ForkSeq.electra;
  const isPostGloas = fork >= ForkSeq.gloas;
  // partialWithdrawalsCount is withdrawals coming from EL since electra (EIP-7002)
  let processedPartialWithdrawalsCount = 0;
  // builderWithdrawalsCount is withdrawals coming from builder payments since Gloas (EIP-7732)
  let processedBuilderWithdrawalsCount = 0;

  if (isPostGloas) {
    const stateGloas = state as CachedBeaconStateGloas;

    const allBuilderPendingWithdrawals =
      stateGloas.builderPendingWithdrawals.length <= MAX_WITHDRAWALS_PER_PAYLOAD
        ? stateGloas.builderPendingWithdrawals.getAllReadonly()
        : null;

    for (let i = 0; i < stateGloas.builderPendingWithdrawals.length; i++) {
      const withdrawal = allBuilderPendingWithdrawals
        ? allBuilderPendingWithdrawals[i]
        : stateGloas.builderPendingWithdrawals.getReadonly(i);

      if (withdrawal.withdrawableEpoch > epoch || withdrawals.length + 1 === MAX_WITHDRAWALS_PER_PAYLOAD) {
        break;
      }

      if (isBuilderPaymentWithdrawable(stateGloas, withdrawal)) {
        const totalWithdrawn = withdrawnBalances.getOrDefault(withdrawal.builderIndex);
        const balance = state.balances.get(withdrawal.builderIndex) - totalWithdrawn;
        const builder = state.validators.get(withdrawal.builderIndex);

        let withdrawableBalance = 0;

        if (builder.slashed) {
          withdrawableBalance = balance < withdrawal.amount ? balance : withdrawal.amount;
        } else if (balance > MIN_ACTIVATION_BALANCE) {
          withdrawableBalance =
            balance - MIN_ACTIVATION_BALANCE < withdrawal.amount ? balance - MIN_ACTIVATION_BALANCE : withdrawal.amount;
        }

        if (withdrawableBalance > 0) {
          withdrawals.push({
            index: withdrawalIndex,
            validatorIndex: withdrawal.builderIndex,
            address: withdrawal.feeRecipient,
            amount: BigInt(withdrawableBalance),
          });
          withdrawalIndex++;
          withdrawnBalances.set(withdrawal.builderIndex, totalWithdrawn + withdrawableBalance);
        }
      }
      processedBuilderWithdrawalsCount++;
    }
  }

  if (isPostElectra) {
    // In pre-gloas, partialWithdrawalBound == MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP
    const partialWithdrawalBound = Math.min(
      withdrawals.length + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP,
      MAX_WITHDRAWALS_PER_PAYLOAD - 1
    );
    const stateElectra = state as CachedBeaconStateElectra;

    // MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP = 8, PENDING_PARTIAL_WITHDRAWALS_LIMIT: 134217728 so we should only call getAllReadonly() if it makes sense
    // pendingPartialWithdrawals comes from EIP-7002 smart contract where it takes fee so it's more likely than not validator is in correct condition to withdraw
    // also we may break early if withdrawableEpoch > epoch
    const allPendingPartialWithdrawals =
      stateElectra.pendingPartialWithdrawals.length <= MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP
      stateElectra.pendingPartialWithdrawals.length <= partialWithdrawalBound
        ? stateElectra.pendingPartialWithdrawals.getAllReadonly()
        : null;

@@ -118,7 +209,7 @@ export function getExpectedWithdrawals(
      const withdrawal = allPendingPartialWithdrawals
        ? allPendingPartialWithdrawals[i]
        : stateElectra.pendingPartialWithdrawals.getReadonly(i);
      if (withdrawal.withdrawableEpoch > epoch || withdrawals.length === MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP) {
      if (withdrawal.withdrawableEpoch > epoch || withdrawals.length === partialWithdrawalBound) {
        break;
      }

@@ -147,11 +238,11 @@
    }
  }

  const bound = Math.min(validators.length, MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP);
  const withdrawalBound = Math.min(validators.length, MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP);
  let n = 0;
  // Just run a bounded loop max iterating over all withdrawals
  // however breaks out once we have MAX_WITHDRAWALS_PER_PAYLOAD
  for (n = 0; n < bound; n++) {
  for (n = 0; n < withdrawalBound; n++) {
    // Get next validator in turn
    const validatorIndex = (nextWithdrawalValidatorIndex + n) % validators.length;

@@ -203,5 +294,5 @@ export function getExpectedWithdrawals(
    }
  }

  return {withdrawals, sampledValidators: n, processedPartialWithdrawalsCount};
  return {withdrawals, sampledValidators: n, processedPartialWithdrawalsCount, processedBuilderWithdrawalsCount};
}
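The builder-payment branch in getExpectedWithdrawals above caps how much of a pending builder payment can actually be withdrawn. Restated compactly as a standalone helper (illustrative sketch only, not part of this change, assuming the same MIN_ACTIVATION_BALANCE constant):

import {MIN_ACTIVATION_BALANCE} from "@lodestar/params";

// Slashed builders can be drawn down to zero; unslashed builders always retain MIN_ACTIVATION_BALANCE.
// `balance` is the builder's current balance minus any amount already withdrawn in this payload.
function withdrawableBuilderAmount(balance: number, requested: number, slashed: boolean): number {
  if (slashed) return Math.min(balance, requested);
  if (balance > MIN_ACTIVATION_BALANCE) return Math.min(balance - MIN_ACTIVATION_BALANCE, requested);
  return 0;
}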
@@ -24,6 +24,7 @@ import {
  SyncPeriod,
  ValidatorIndex,
  electra,
  gloas,
  phase0,
} from "@lodestar/types";
import {LodestarError} from "@lodestar/utils";
@@ -46,6 +47,7 @@ import {
  getSeed,
  isActiveValidator,
  isAggregatorFromCommitteeLength,
  naiveGetPayloadTimlinessCommitteeIndices,
} from "../util/index.js";
import {computeBaseRewardPerIncrement, computeSyncParticipantReward} from "../util/syncCommittee.js";
import {sumTargetUnslashedBalanceIncrements} from "../util/targetUnslashedBalance.js";
@@ -59,7 +61,7 @@ import {
  computeSyncCommitteeCache,
  getSyncCommitteeCache,
} from "./syncCommitteeCache.js";
import {BeaconStateAllForks, BeaconStateAltair} from "./types.js";
import {BeaconStateAllForks, BeaconStateAltair, BeaconStateGloas} from "./types.js";

/** `= PROPOSER_WEIGHT / (WEIGHT_DENOMINATOR - PROPOSER_WEIGHT)` */
export const PROPOSER_WEIGHT_FACTOR = PROPOSER_WEIGHT / (WEIGHT_DENOMINATOR - PROPOSER_WEIGHT);
@@ -238,6 +240,10 @@ export class EpochCache {
  /** TODO: Indexed SyncCommitteeCache */
  nextSyncCommitteeIndexed: SyncCommitteeCache;

  // TODO GLOAS: See if we need to cache PTC for prev/next epoch
  // PTC for current epoch
  payloadTimelinessCommittee: ValidatorIndex[][];

  // TODO: Helper stats
  syncPeriod: SyncPeriod;

@@ -276,6 +282,7 @@ export class EpochCache {
    previousTargetUnslashedBalanceIncrements: number;
    currentSyncCommitteeIndexed: SyncCommitteeCache;
    nextSyncCommitteeIndexed: SyncCommitteeCache;
    payloadTimelinessCommittee: ValidatorIndex[][];
    epoch: Epoch;
    syncPeriod: SyncPeriod;
  }) {
@@ -307,6 +314,7 @@ export class EpochCache {
    this.previousTargetUnslashedBalanceIncrements = data.previousTargetUnslashedBalanceIncrements;
    this.currentSyncCommitteeIndexed = data.currentSyncCommitteeIndexed;
    this.nextSyncCommitteeIndexed = data.nextSyncCommitteeIndexed;
    this.payloadTimelinessCommittee = data.payloadTimelinessCommittee;
    this.epoch = data.epoch;
    this.syncPeriod = data.syncPeriod;
  }
@@ -485,6 +493,17 @@ export class EpochCache {
      nextSyncCommitteeIndexed = new SyncCommitteeCacheEmpty();
    }

    // Compute PTC for this epoch
    let payloadTimelinessCommittee: ValidatorIndex[][] = [];
    if (currentEpoch >= config.GLOAS_FORK_EPOCH) {
      payloadTimelinessCommittee = naiveGetPayloadTimlinessCommitteeIndices(
        state as BeaconStateGloas,
        currentShuffling,
        effectiveBalanceIncrements,
        currentEpoch
      );
    }

    // Precompute churnLimit for efficient initiateValidatorExit() during block proposing. MUST be recomputed every time the
    // active validator indices set changes in size. Validators change active status only when:
    // - validator.activation_epoch is set. Only changes in process_registry_updates() if validator can be activated. If
@@ -559,6 +578,7 @@ export class EpochCache {
      currentTargetUnslashedBalanceIncrements,
      currentSyncCommitteeIndexed,
      nextSyncCommitteeIndexed,
      payloadTimelinessCommittee: payloadTimelinessCommittee,
      epoch: currentEpoch,
      syncPeriod: computeSyncPeriodAtEpoch(currentEpoch),
    });
@@ -605,6 +625,7 @@ export class EpochCache {
      currentTargetUnslashedBalanceIncrements: this.currentTargetUnslashedBalanceIncrements,
      currentSyncCommitteeIndexed: this.currentSyncCommitteeIndexed,
      nextSyncCommitteeIndexed: this.nextSyncCommitteeIndexed,
      payloadTimelinessCommittee: this.payloadTimelinessCommittee,
      epoch: this.epoch,
      syncPeriod: this.syncPeriod,
    });
@@ -750,6 +771,14 @@ export class EpochCache {
    const epochAfterUpcoming = upcomingEpoch + 1;

    this.proposersPrevEpoch = this.proposers;
    if (upcomingEpoch >= this.config.GLOAS_FORK_EPOCH) {
      this.payloadTimelinessCommittee = naiveGetPayloadTimlinessCommitteeIndices(
        state as BeaconStateGloas,
        this.currentShuffling,
        this.effectiveBalanceIncrements,
        upcomingEpoch
      );
    }
    if (upcomingEpoch >= this.config.FULU_FORK_EPOCH) {
      // Populate proposer cache with lookahead from state
      const proposerLookahead = (state as CachedBeaconStateFulu).proposerLookahead.getAll();
@@ -1151,6 +1180,34 @@ export class EpochCache {
  isPostElectra(): boolean {
    return this.epoch >= this.config.ELECTRA_FORK_EPOCH;
  }

  getPayloadTimelinessCommittee(slot: Slot): ValidatorIndex[] {
    const epoch = computeEpochAtSlot(slot);

    if (epoch < this.config.GLOAS_FORK_EPOCH) {
      throw new Error("Payload Timeliness Committee is not available before gloas fork");
    }

    if (epoch === this.epoch) {
      return this.payloadTimelinessCommittee[slot % SLOTS_PER_EPOCH];
    }

    throw new Error(`Payload Timeliness Committee is not available for slot=${slot}`);
  }

  getIndexedPayloadAttestation(
    slot: Slot,
    payloadAttestation: gloas.PayloadAttestation
  ): gloas.IndexedPayloadAttestation {
    const payloadTimelinessCommittee = this.getPayloadTimelinessCommittee(slot);
    const attestingIndices = payloadAttestation.aggregationBits.intersectValues(payloadTimelinessCommittee);

    return {
      attestingIndices: attestingIndices.sort((a, b) => a - b),
      data: payloadAttestation.data,
      signature: payloadAttestation.signature,
    };
  }
}

function getEffectiveBalanceIncrementsByteLen(validatorCount: number): number {
@@ -12,9 +12,11 @@ import {
  CachedBeaconStateCapella,
  CachedBeaconStateElectra,
  CachedBeaconStateFulu,
  CachedBeaconStateGloas,
  CachedBeaconStatePhase0,
  EpochTransitionCache,
} from "../types.js";
import {processBuilderPendingPayments} from "./processBuilderPendingPayments.ts";
import {processEffectiveBalanceUpdates} from "./processEffectiveBalanceUpdates.js";
import {processEth1DataReset} from "./processEth1DataReset.js";
import {processHistoricalRootsUpdate} from "./processHistoricalRootsUpdate.js";
@@ -53,6 +55,7 @@ export {
  processPendingDeposits,
  processPendingConsolidations,
  processProposerLookahead,
  processBuilderPendingPayments,
};

export {computeUnrealizedCheckpoints} from "./computeUnrealizedCheckpoints.js";
@@ -78,6 +81,7 @@ export enum EpochTransitionStep {
  processPendingDeposits = "processPendingDeposits",
  processPendingConsolidations = "processPendingConsolidations",
  processProposerLookahead = "processProposerLookahead",
  processBuilderPendingPayments = "processBuilderPendingPayments",
}

export function processEpoch(
@@ -154,6 +158,14 @@ export function processEpoch(
    }
  }

  if (fork >= ForkSeq.gloas) {
    const timer = metrics?.epochTransitionStepTime.startTimer({
      step: EpochTransitionStep.processBuilderPendingPayments,
    });
    processBuilderPendingPayments(state as CachedBeaconStateGloas);
    timer?.();
  }

  {
    const timer = metrics?.epochTransitionStepTime.startTimer({
      step: EpochTransitionStep.processEffectiveBalanceUpdates,
@@ -0,0 +1,31 @@
import {SLOTS_PER_EPOCH} from "@lodestar/params";
import {ssz} from "@lodestar/types";
import {CachedBeaconStateGloas} from "../types.ts";
import {computeExitEpochAndUpdateChurn} from "../util/epoch.ts";
import {getBuilderPaymentQuorumThreshold} from "../util/gloas.ts";

/**
 * Processes the builder pending payments from the previous epoch.
 */
export function processBuilderPendingPayments(state: CachedBeaconStateGloas): void {
  const quorum = getBuilderPaymentQuorumThreshold(state);

  for (let i = 0; i < SLOTS_PER_EPOCH; i++) {
    const payment = state.builderPendingPayments.get(i);
    if (payment.weight > quorum) {
      const exitQueueEpoch = computeExitEpochAndUpdateChurn(state, BigInt(payment.withdrawal.amount));
      payment.withdrawal.withdrawableEpoch = exitQueueEpoch + state.config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY;

      state.builderPendingWithdrawals.push(payment.withdrawal);
    }
  }

  // TODO GLOAS: Optimize this
  for (let i = 0; i < state.builderPendingPayments.length; i++) {
    if (i < SLOTS_PER_EPOCH) {
      state.builderPendingPayments.set(i, state.builderPendingPayments.get(i + SLOTS_PER_EPOCH).clone());
    } else {
      state.builderPendingPayments.set(i, ssz.gloas.BuilderPendingPayment.defaultViewDU());
    }
  }
}
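processBuilderPendingPayments above relies on a two-epoch payment buffer: indices 0..SLOTS_PER_EPOCH-1 hold the previous epoch's payments and indices SLOTS_PER_EPOCH..2*SLOTS_PER_EPOCH-1 the current epoch's, which is why block processing writes to SLOTS_PER_EPOCH + (slot % SLOTS_PER_EPOCH) and the epoch transition shifts the upper half down. A small illustrative helper (not part of this change) that maps a slot to its buffer index:

import {SLOTS_PER_EPOCH} from "@lodestar/params";

// Returns the builderPendingPayments index for a slot, or undefined if the slot is
// older than the previous epoch (its payment has already been processed or dropped).
function builderPaymentIndex(slot: number, currentEpoch: number): number | undefined {
  const proposalEpoch = Math.floor(slot / SLOTS_PER_EPOCH);
  if (proposalEpoch === currentEpoch) return SLOTS_PER_EPOCH + (slot % SLOTS_PER_EPOCH);
  if (proposalEpoch === currentEpoch - 1) return slot % SLOTS_PER_EPOCH;
  return undefined;
}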
@@ -52,6 +52,7 @@ export type {
  BeaconStateElectra,
  BeaconStateExecutions,
  BeaconStateFulu,
  BeaconStateGloas,
  // Non-cached states
  BeaconStatePhase0,
  CachedBeaconStateAllForks,
@@ -62,6 +63,7 @@ export type {
  CachedBeaconStateElectra,
  CachedBeaconStateExecutions,
  CachedBeaconStateFulu,
  CachedBeaconStateGloas,
  CachedBeaconStatePhase0,
} from "./types.js";
export * from "./util/index.js";
@@ -1,5 +1,5 @@
import {ForkSeq} from "@lodestar/params";
import {SignedBeaconBlock, altair, capella} from "@lodestar/types";
import {IndexedAttestation, SignedBeaconBlock, altair, capella} from "@lodestar/types";
import {getSyncCommitteeSignatureSet} from "../block/processSyncCommittee.js";
import {CachedBeaconStateAllForks, CachedBeaconStateAltair} from "../types.js";
import {ISignatureSet} from "../util/index.js";
@@ -14,6 +14,7 @@ import {getVoluntaryExitsSignatureSets} from "./voluntaryExits.js";
export * from "./attesterSlashings.js";
export * from "./blsToExecutionChange.js";
export * from "./indexedAttestation.js";
export * from "./indexedPayloadAttestation.ts";
export * from "./proposer.js";
export * from "./proposerSlashings.js";
export * from "./randao.js";
@@ -26,6 +27,7 @@ export * from "./voluntaryExits.js";
export function getBlockSignatureSets(
  state: CachedBeaconStateAllForks,
  signedBlock: SignedBeaconBlock,
  indexedAttestations: IndexedAttestation[],
  opts?: {
    /** Useful since block proposer signature is verified beforehand on gossip validation */
    skipProposerSignature?: boolean;
@@ -38,7 +40,7 @@ export function getBlockSignatureSets(
    getRandaoRevealSignatureSet(state, signedBlock.message),
    ...getProposerSlashingsSignatureSets(state, signedBlock),
    ...getAttesterSlashingsSignatureSets(state, signedBlock),
    ...getAttestationsSignatureSets(state, signedBlock),
    ...getAttestationsSignatureSets(state, signedBlock, indexedAttestations),
    ...getVoluntaryExitsSignatureSets(state, signedBlock),
  ];