mirror of
https://github.com/OffchainLabs/prysm.git
synced 2026-01-10 13:58:09 -05:00
Compare commits
20 Commits
migrate-rp
...
init-clien
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dd37678802 | ||
|
|
652134c1ff | ||
|
|
32a0da89e8 | ||
|
|
a5e73640de | ||
|
|
9526860ba5 | ||
|
|
6cbaddd79c | ||
|
|
b7ab6e4d9e | ||
|
|
d43a32a99d | ||
|
|
4a4b34d43c | ||
|
|
08156a4b72 | ||
|
|
4fe6c79ec9 | ||
|
|
526843ba27 | ||
|
|
bfdd154081 | ||
|
|
cd95c571f9 | ||
|
|
fac981b32a | ||
|
|
26476dcdd4 | ||
|
|
45e41eb8ea | ||
|
|
dce3d1e8ea | ||
|
|
bd255a5790 | ||
|
|
ad400fcf6e |
204
.bazelrc
204
.bazelrc
@@ -1,16 +1,9 @@
|
||||
# Import bazelrc presets
|
||||
import %workspace%/build/bazelrc/convenience.bazelrc
|
||||
import %workspace%/build/bazelrc/correctness.bazelrc
|
||||
import %workspace%/build/bazelrc/cross.bazelrc
|
||||
import %workspace%/build/bazelrc/debug.bazelrc
|
||||
import %workspace%/build/bazelrc/hermetic-cc.bazelrc
|
||||
import %workspace%/build/bazelrc/performance.bazelrc
|
||||
# Print warnings for tests with inappropriate test size or timeout.
|
||||
test --test_verbose_timeout_warnings
|
||||
|
||||
# hermetic_cc_toolchain v3.0.1 required changes.
|
||||
common --enable_platform_specific_config
|
||||
build:linux --sandbox_add_mount_pair=/tmp
|
||||
build:macos --sandbox_add_mount_pair=/var/tmp
|
||||
build:windows --sandbox_add_mount_pair=C:\Temp
|
||||
# Only build test targets when running bazel test //...
|
||||
test --build_tests_only
|
||||
test --test_output=errors
|
||||
|
||||
# E2E run with debug gotag
|
||||
test:e2e --define gotags=debug
|
||||
@@ -18,10 +11,26 @@ test:e2e --define gotags=debug
|
||||
# Clearly indicate that coverage is enabled to disable certain nogo checks.
|
||||
coverage --define=coverage_enabled=1
|
||||
|
||||
# Fix for rules_docker. See: https://github.com/bazelbuild/rules_docker/issues/842
|
||||
build --host_force_python=PY2
|
||||
test --host_force_python=PY2
|
||||
run --host_force_python=PY2
|
||||
|
||||
# Networking is blocked for tests by default, add "requires-network" tag to your test if networking
|
||||
# is required within the sandbox. Network sandboxing only works on linux.
|
||||
build --sandbox_default_allow_network=false
|
||||
|
||||
# Stamp binaries with git information
|
||||
build --workspace_status_command=./hack/workspace_status.sh
|
||||
build --stamp
|
||||
|
||||
# Prevent PATH changes from rebuilding when switching from IDE to command line.
|
||||
build --incompatible_strict_action_env
|
||||
test --incompatible_strict_action_env
|
||||
run --incompatible_strict_action_env
|
||||
|
||||
build --define blst_disabled=false
|
||||
test --define blst_disabled=false
|
||||
run --define blst_disabled=false
|
||||
|
||||
build:blst_disabled --define blst_disabled=true
|
||||
@@ -32,25 +41,182 @@ build:minimal --@io_bazel_rules_go//go/config:tags=minimal
|
||||
|
||||
# Release flags
|
||||
build:release --compilation_mode=opt
|
||||
build:release --stamp
|
||||
build:release --define pgo_enabled=1
|
||||
build:release --config=llvm
|
||||
|
||||
# LLVM compiler for building C/C++ dependencies.
|
||||
build:llvm --crosstool_top=@llvm_toolchain//:toolchain
|
||||
build:llvm --define compiler=llvm
|
||||
build:llvm --copt -fno-sanitize=vptr,function
|
||||
build:llvm --linkopt -fno-sanitize=vptr,function
|
||||
|
||||
build:asan --copt -fsanitize=address,undefined
|
||||
build:asan --copt -fno-omit-frame-pointer
|
||||
build:asan --linkopt -fsanitize=address,undefined
|
||||
build:asan --copt -fno-sanitize=vptr,function
|
||||
build:asan --linkopt -fno-sanitize=vptr,function
|
||||
build:asan --copt -DADDRESS_SANITIZER=1
|
||||
build:asan --copt -D__SANITIZE_ADDRESS__
|
||||
build:asan --linkopt -ldl
|
||||
|
||||
build:llvm-asan --config=llvm
|
||||
build:llvm-asan --config=asan
|
||||
build:llvm-asan --linkopt -fuse-ld=ld.lld
|
||||
|
||||
build:fuzz --@io_bazel_rules_go//go/config:tags=fuzz
|
||||
|
||||
# Build binary with cgo symbolizer for debugging / profiling.
|
||||
build:cgo_symbolizer --config=llvm
|
||||
build:cgo_symbolizer --copt=-g
|
||||
build:cgo_symbolizer --define=USE_CGO_SYMBOLIZER=true
|
||||
build:cgo_symbolizer -c dbg
|
||||
build:cgo_symbolizer --define=gotags=cgosymbolizer_enabled
|
||||
|
||||
# multi-arch cross-compiling toolchain configs:
|
||||
-----------------------------------------------
|
||||
build:cross --crosstool_top=@prysm_toolchains//:multiarch_toolchain
|
||||
build:cross --host_platform=@io_bazel_rules_go//go/toolchain:linux_amd64
|
||||
build:cross --host_crosstool_top=@prysm_toolchains//:hostonly_toolchain
|
||||
|
||||
# linux_amd64 config for cross compiler toolchain, not strictly necessary since host/exec env is amd64
|
||||
build:linux_amd64 --platforms=@io_bazel_rules_go//go/toolchain:linux_amd64_cgo
|
||||
|
||||
# osx_amd64 config for cross compiler toolchain
|
||||
build:osx_amd64 --config=cross
|
||||
build:osx_amd64 --platforms=@io_bazel_rules_go//go/toolchain:darwin_amd64_cgo
|
||||
build:osx_amd64 --compiler=osxcross
|
||||
|
||||
# windows
|
||||
build:windows_amd64 --config=cross
|
||||
build:windows_amd64 --platforms=@io_bazel_rules_go//go/toolchain:windows_amd64_cgo
|
||||
build:windows_amd64 --compiler=mingw-w64
|
||||
|
||||
# linux_arm64 conifg for cross compiler toolchain
|
||||
build:linux_arm64 --config=cross
|
||||
build:linux_arm64 --platforms=@io_bazel_rules_go//go/toolchain:linux_arm64_cgo
|
||||
build:linux_arm64 --copt=-funsafe-math-optimizations
|
||||
build:linux_arm64 --copt=-ftree-vectorize
|
||||
build:linux_arm64 --copt=-fomit-frame-pointer
|
||||
build:linux_arm64 --cpu=aarch64
|
||||
build:linux_arm64 --compiler=clang
|
||||
build:linux_arm64 --copt=-march=armv8-a
|
||||
|
||||
|
||||
# toolchain build debug configs
|
||||
#------------------------------
|
||||
build:debug --sandbox_debug
|
||||
build:debug --toolchain_resolution_debug=".*"
|
||||
build:debug --toolchain_resolution_debug
|
||||
build:debug --verbose_failures
|
||||
build:debug -s
|
||||
|
||||
# windows debug
|
||||
build:windows_amd64_debug --config=windows_amd64
|
||||
build:windows_amd64_debug --config=debug
|
||||
|
||||
# osx_amd64 debug config
|
||||
build:osx_amd64_debug --config=debug
|
||||
build:osx_amd64_debug --config=osx_amd64
|
||||
|
||||
# linux_arm64_debug
|
||||
build:linux_arm64_debug --config=linux_arm64
|
||||
build:linux_arm64_debug --config=debug
|
||||
|
||||
# linux_amd64_debug
|
||||
build:linux_amd64_debug --config=linux_amd64
|
||||
build:linux_amd64_debug --config=debug
|
||||
|
||||
|
||||
# Docker Sandbox Configs
|
||||
#-----------------------
|
||||
# Note all docker sandbox configs must run from a linux x86_64 host
|
||||
# build:docker-sandbox --experimental_docker_image=gcr.io/prysmaticlabs/rbe-worker:latest
|
||||
build:docker-sandbox --spawn_strategy=docker --strategy=Javac=docker --genrule_strategy=docker
|
||||
build:docker-sandbox --define=EXECUTOR=remote
|
||||
build:docker-sandbox --experimental_docker_verbose
|
||||
build:docker-sandbox --experimental_enable_docker_sandbox
|
||||
build:docker-sandbox --crosstool_top=@rbe_ubuntu_clang//cc:toolchain
|
||||
build:docker-sandbox --host_javabase=@rbe_ubuntu_clang//java:jdk
|
||||
build:docker-sandbox --javabase=@rbe_ubuntu_clang//java:jdk
|
||||
build:docker-sandbox --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
|
||||
build:docker-sandbox --java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
|
||||
build:docker-sandbox --extra_execution_platforms=@rbe_ubuntu_clang//config:platform
|
||||
build:docker-sandbox --host_platform=@rbe_ubuntu_clang//config:platform
|
||||
build:docker-sandbox --platforms=@rbe_ubuntu_clang//config:platform
|
||||
build:docker-sandbox --extra_toolchains=@prysm_toolchains//:cc-toolchain-multiarch
|
||||
|
||||
# windows_amd64 docker sandbox build config
|
||||
build:windows_amd64_docker --config=docker-sandbox --config=windows_amd64
|
||||
build:windows_amd64_docker_debug --config=windows_amd64_docker --config=debug
|
||||
|
||||
# osx_amd64 docker sandbox build config
|
||||
build:osx_amd64_docker --config=docker-sandbox --config=osx_amd64
|
||||
build:osx_amd64_docker_debug --config=osx_amd64_docker --config=debug
|
||||
|
||||
# linux_arm64 docker sandbox build config
|
||||
build:linux_arm64_docker --config=docker-sandbox --config=linux_arm64
|
||||
build:linux_arm64_docker_debug --config=linux_arm64_docker --config=debug
|
||||
|
||||
# linux_amd64 docker sandbox build config
|
||||
build:linux_amd64_docker --config=docker-sandbox --config=linux_amd64
|
||||
build:linux_amd64_docker_debug --config=linux_amd64_docker --config=debug
|
||||
|
||||
|
||||
# Remote Build Execution
|
||||
#-----------------------
|
||||
# Originally from https://github.com/bazelbuild/bazel-toolchains/blob/master/bazelrc/bazel-2.0.0.bazelrc
|
||||
#
|
||||
# Depending on how many machines are in the remote execution instance, setting
|
||||
# this higher can make builds faster by allowing more jobs to run in parallel.
|
||||
# Setting it too high can result in jobs that timeout, however, while waiting
|
||||
# for a remote machine to execute them.
|
||||
build:remote --jobs=50
|
||||
|
||||
# Set several flags related to specifying the platform, toolchain and java
|
||||
# properties.
|
||||
# These flags should only be used as is for the rbe-ubuntu16-04 container
|
||||
# and need to be adapted to work with other toolchain containers.
|
||||
build:remote --host_javabase=@rbe_ubuntu_clang//java:jdk
|
||||
build:remote --javabase=@rbe_ubuntu_clang//java:jdk
|
||||
build:remote --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
|
||||
build:remote --java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
|
||||
build:remote --crosstool_top=@rbe_ubuntu_clang//cc:toolchain
|
||||
build:remote --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
|
||||
# Platform flags:
|
||||
# The toolchain container used for execution is defined in the target indicated
|
||||
# by "extra_execution_platforms", "host_platform" and "platforms".
|
||||
# More about platforms: https://docs.bazel.build/versions/master/platforms.html
|
||||
build:remote --extra_toolchains=@rbe_ubuntu_clang//config:cc-toolchain
|
||||
build:remote --extra_execution_platforms=@rbe_ubuntu_clang//config:platform
|
||||
build:remote --host_platform=@rbe_ubuntu_clang//config:platform
|
||||
build:remote --platforms=@rbe_ubuntu_clang//config:platform
|
||||
|
||||
# Starting with Bazel 0.27.0 strategies do not need to be explicitly
|
||||
# defined. See https://github.com/bazelbuild/bazel/issues/7480
|
||||
build:remote --define=EXECUTOR=remote
|
||||
|
||||
# Enable remote execution so actions are performed on the remote systems.
|
||||
# build:remote --remote_executor=grpcs://remotebuildexecution.googleapis.com
|
||||
|
||||
# Enforce stricter environment rules, which eliminates some non-hermetic
|
||||
# behavior and therefore improves both the remote cache hit rate and the
|
||||
# correctness and repeatability of the build.
|
||||
build:remote --incompatible_strict_action_env=true
|
||||
|
||||
# Set a higher timeout value, just in case.
|
||||
build:remote --remote_timeout=3600
|
||||
|
||||
# Enable authentication. This will pick up application default credentials by
|
||||
# default. You can use --google_credentials=some_file.json to use a service
|
||||
# account credential instead.
|
||||
# build:remote --google_default_credentials=true
|
||||
|
||||
# Enable build without the bytes
|
||||
# See: https://github.com/bazelbuild/bazel/issues/6862
|
||||
build:remote --experimental_remote_download_outputs=toplevel --experimental_inmemory_jdeps_files --experimental_inmemory_dotd_files
|
||||
|
||||
build:remote --remote_local_fallback
|
||||
|
||||
# Ignore GoStdLib with remote caching
|
||||
build --modify_execution_info='GoStdlib.*=+no-remote-cache'
|
||||
|
||||
# Set bazel gotag
|
||||
build --define gotags=bazel
|
||||
|
||||
# Abseil requires c++14 or greater.
|
||||
build --cxxopt=-std=c++20
|
||||
build --host_cxxopt=-std=c++20
|
||||
|
||||
@@ -1 +1 @@
|
||||
7.4.1
|
||||
5.0.0
|
||||
|
||||
@@ -9,8 +9,8 @@
|
||||
#build:remote-cache --strategy=Genrule=standalone
|
||||
|
||||
# Prysm specific remote-cache properties.
|
||||
#build:remote-cache --disk_cache=
|
||||
build:remote-cache --remote_download_minimal
|
||||
build:remote-cache --remote_build_event_upload=minimal
|
||||
build:remote-cache --remote_cache=grpc://bazel-remote-cache:9092
|
||||
build:remote-cache --experimental_remote_downloader=grpc://bazel-remote-cache:9092
|
||||
build:remote-cache --remote_local_fallback
|
||||
@@ -28,10 +28,7 @@ build --experimental_use_hermetic_linux_sandbox
|
||||
# Import workspace options.
|
||||
import %workspace%/.bazelrc
|
||||
|
||||
# Enable blake3 once it is supported in remote cache. See: https://github.com/buchgr/bazel-remote/issues/710
|
||||
# startup --digest_function=blake3
|
||||
|
||||
startup --host_jvm_args=-Xmx8g --host_jvm_args=-Xms4g
|
||||
startup --host_jvm_args=-Xmx4g --host_jvm_args=-Xms2g
|
||||
build --experimental_strict_action_env
|
||||
build --sandbox_tmpfs_path=/tmp
|
||||
build --verbose_failures
|
||||
@@ -41,18 +38,9 @@ build --curses=no --color=no
|
||||
build --keep_going
|
||||
build --test_output=errors
|
||||
build --flaky_test_attempts=5
|
||||
build --build_runfile_links=false # Only build runfile symlink forest when required by local action, test, or run command.
|
||||
# Disabled race detection due to unstable test results under constrained environment build kite
|
||||
# build --features=race
|
||||
|
||||
# Better caching
|
||||
build:nostamp --nostamp
|
||||
|
||||
# Build metadata
|
||||
build --build_metadata=ROLE=CI
|
||||
build --build_metadata=REPO_URL=https://github.com/prysmaticlabs/prysm.git
|
||||
build --workspace_status_command=./hack/workspace_status_ci.sh
|
||||
|
||||
# Buildbuddy
|
||||
build --bes_results_url=https://app.buildbuddy.io/invocation/
|
||||
build --bes_backend=grpcs://remote.buildbuddy.io
|
||||
build:nostamp --workspace_status_command=./hack/workspace_status_ci.sh
|
||||
|
||||
@@ -11,7 +11,7 @@ name = "go"
|
||||
enabled = true
|
||||
|
||||
[analyzers.meta]
|
||||
import_paths = ["github.com/prysmaticlabs/prysm/v5"]
|
||||
import_paths = ["github.com/prysmaticlabs/prysm"]
|
||||
|
||||
[[analyzers]]
|
||||
name = "test-coverage"
|
||||
|
||||
10
.github/CODEOWNERS
vendored
10
.github/CODEOWNERS
vendored
@@ -5,4 +5,12 @@
|
||||
*.bzl @prestonvanloon
|
||||
|
||||
# Anyone on prylabs team can approve dependency updates.
|
||||
deps.bzl @prysmaticlabs/core-team
|
||||
deps.bzl @prysmaticlabs/core-team
|
||||
|
||||
# Radek and Nishant are responsible for changes that can affect the native state feature.
|
||||
# See https://www.notion.so/prysmaticlabs/Native-Beacon-State-Redesign-6cc9744b4ec1439bb34fa829b36a35c1
|
||||
/beacon-chain/state/fieldtrie/ @rkapka @nisdas
|
||||
/beacon-chain/state/v1/ @rkapka @nisdas
|
||||
/beacon-chain/state/v2/ @rkapka @nisdas
|
||||
/beacon-chain/state/v3/ @rkapka @nisdas
|
||||
/beacon-chain/state/state-native/ @rkapka @nisdas
|
||||
|
||||
59
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
59
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
---
|
||||
name: "\U0001F41EBug report"
|
||||
about: Report a bug or problem with running Prysm
|
||||
---
|
||||
<!--💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎
|
||||
|
||||
Hellooo! 😄
|
||||
|
||||
To help us tend to your issue faster, please search our currently open issues before submitting a new one.
|
||||
Existing issues often contain information about workarounds, resolution, or progress updates.
|
||||
|
||||
💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎💎-->
|
||||
|
||||
# 🐞 Bug Report
|
||||
|
||||
### Description
|
||||
|
||||
<!-- ✍️--> A clear and concise description of the problem...
|
||||
|
||||
### Has this worked before in a previous version?
|
||||
|
||||
<!-- Did this behavior use to work in the previous version? -->
|
||||
<!-- ✍️--> Yes, the previous version in which this bug was not present was: ....
|
||||
|
||||
## 🔬 Minimal Reproduction
|
||||
|
||||
<!--
|
||||
Please let us know how we can reproduce this issue. Include the exact method you used to run Prysm along with any flags used in your beacon chain and/or validator. Make sure you don't upload any confidential files or private keys.
|
||||
-->
|
||||
|
||||
## 🔥 Error
|
||||
|
||||
<pre><code>
|
||||
<!-- If the issue is accompanied by an error, please share the error logs with us below. If you have a lot of logs, place make a paste bin with your logs and share the link with us here: -->
|
||||
<!-- ✍️-->
|
||||
|
||||
</code></pre>
|
||||
|
||||
|
||||
## 🌍 Your Environment
|
||||
|
||||
**Operating System:**
|
||||
|
||||
<pre>
|
||||
<code>
|
||||
|
||||
</code>
|
||||
</pre>
|
||||
|
||||
**What version of Prysm are you running? (Which release)**
|
||||
|
||||
<pre>
|
||||
<code>
|
||||
|
||||
</code>
|
||||
</pre>
|
||||
|
||||
**Anything else relevant (validator index / public key)?**
|
||||
|
||||
66
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
66
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@@ -1,66 +0,0 @@
|
||||
name: 🐞 Bug report
|
||||
description: Report a bug or problem with running Prysm
|
||||
labels: ["Bug"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
To help us tend to your issue faster, please search our currently open issues before submitting a new one.
|
||||
Existing issues often contain information about workarounds, resolution, or progress updates.
|
||||
- type: textarea
|
||||
id: what-happened
|
||||
attributes:
|
||||
label: Describe the bug
|
||||
description: |
|
||||
A clear and concise description of the problem...
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: previous-version
|
||||
attributes:
|
||||
label: Has this worked before in a previous version?
|
||||
description: Did this behavior use to work in the previous version?
|
||||
render: text
|
||||
- type: textarea
|
||||
id: reproduction-steps
|
||||
attributes:
|
||||
label: 🔬 Minimal Reproduction
|
||||
description: |
|
||||
Please let us know how we can reproduce this issue.
|
||||
Include the exact method you used to run Prysm along with any flags used in your beacon chain and/or validator.
|
||||
Make sure you don't upload any confidential files or private keys.
|
||||
placeholder: |
|
||||
Steps to reproduce:
|
||||
|
||||
1. Start '...'
|
||||
2. Then '...'
|
||||
3. Check '...'
|
||||
4. See error
|
||||
- type: textarea
|
||||
id: errors
|
||||
attributes:
|
||||
label: Error
|
||||
description: |
|
||||
If the issue is accompanied by an error, please share the error logs with us below.
|
||||
If you have a lot of logs, place make a paste bin with your logs and share the link with us here:
|
||||
render: text
|
||||
- type: dropdown
|
||||
id: platform
|
||||
attributes:
|
||||
label: Platform(s)
|
||||
description: What platform(s) did this occur on?
|
||||
multiple: true
|
||||
options:
|
||||
- Linux (x86)
|
||||
- Linux (ARM)
|
||||
- Mac (Intel)
|
||||
- Mac (Apple Silicon)
|
||||
- Windows (x86)
|
||||
- Windows (ARM)
|
||||
- type: input
|
||||
attributes:
|
||||
label: What version of Prysm are you running? (Which release)
|
||||
description: You can check your Prysm version by running your beacon node or validator with the `--version` flag.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Anything else relevant (validator index / public key)?
|
||||
6
.github/ISSUE_TEMPLATE/feature_request.md
vendored
6
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -16,12 +16,12 @@ Existing issues often contain information about workarounds, resolution, or prog
|
||||
|
||||
### Description
|
||||
|
||||
<!-- ✍️ A clear and concise description of the problem or missing capability... -->
|
||||
<!-- ✍️--> A clear and concise description of the problem or missing capability...
|
||||
|
||||
### Describe the solution you'd like
|
||||
|
||||
<!-- ✍️ If you have a solution in mind, please describe it. -->
|
||||
<!-- ✍️--> If you have a solution in mind, please describe it.
|
||||
|
||||
### Describe alternatives you've considered
|
||||
|
||||
<!-- ✍️ Have you considered any alternative solutions or workarounds? -->
|
||||
<!-- ✍️--> Have you considered any alternative solutions or workarounds?
|
||||
|
||||
7
.github/PULL_REQUEST_TEMPLATE.md
vendored
7
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -10,7 +10,6 @@
|
||||
in review.
|
||||
4. Note that PRs updating dependencies and new Go versions are not accepted.
|
||||
Please file an issue instead.
|
||||
5. A changelog entry is required for user facing issues.
|
||||
-->
|
||||
|
||||
**What type of PR is this?**
|
||||
@@ -29,9 +28,3 @@
|
||||
Fixes #
|
||||
|
||||
**Other notes for review**
|
||||
|
||||
**Acknowledgements**
|
||||
|
||||
- [ ] I have read [CONTRIBUTING.md](https://github.com/prysmaticlabs/prysm/blob/develop/CONTRIBUTING.md).
|
||||
- [ ] I have included a uniquely named [changelog fragment file](https://github.com/prysmaticlabs/prysm/blob/develop/CONTRIBUTING.md#maintaining-changelogmd).
|
||||
- [ ] I have added a description to this PR with sufficient context for reviewers to understand this PR.
|
||||
|
||||
5
.github/actions/gofmt/Dockerfile
vendored
Normal file
5
.github/actions/gofmt/Dockerfile
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
FROM cytopia/gofmt
|
||||
|
||||
COPY entrypoint.sh /entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
12
.github/actions/gofmt/action.yml
vendored
Normal file
12
.github/actions/gofmt/action.yml
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
name: 'Gofmt checker'
|
||||
description: 'Checks that all project files have been properly formatted.'
|
||||
inputs:
|
||||
path:
|
||||
description: 'Path to check'
|
||||
required: true
|
||||
default: './'
|
||||
runs:
|
||||
using: 'docker'
|
||||
image: 'Dockerfile'
|
||||
args:
|
||||
- ${{ inputs.path }}
|
||||
15
.github/actions/gofmt/entrypoint.sh
vendored
Executable file
15
.github/actions/gofmt/entrypoint.sh
vendored
Executable file
@@ -0,0 +1,15 @@
|
||||
#!/bin/sh -l
|
||||
set -e
|
||||
|
||||
cd $GITHUB_WORKSPACE
|
||||
|
||||
# Check if any files are not formatted.
|
||||
nonformatted="$(gofmt -l $1 2>&1)"
|
||||
|
||||
# Return if `go fmt` passes.
|
||||
[ -z "$nonformatted" ] && exit 0
|
||||
|
||||
# Notify of issues with formatting.
|
||||
echo "Following files need to be properly formatted:"
|
||||
echo "$nonformatted"
|
||||
exit 1
|
||||
2
.github/actions/gomodtidy/Dockerfile
vendored
2
.github/actions/gomodtidy/Dockerfile
vendored
@@ -1,4 +1,4 @@
|
||||
FROM golang:1.24-alpine
|
||||
FROM golang:alpine
|
||||
|
||||
COPY entrypoint.sh /entrypoint.sh
|
||||
|
||||
|
||||
4
.github/actions/gomodtidy/entrypoint.sh
vendored
4
.github/actions/gomodtidy/entrypoint.sh
vendored
@@ -1,8 +1,8 @@
|
||||
#!/bin/sh -l
|
||||
set -e
|
||||
export PATH="$PATH:/usr/local/go/bin"
|
||||
export PATH=$PATH:/usr/local/go/bin
|
||||
|
||||
cd "$GITHUB_WORKSPACE"
|
||||
cd $GITHUB_WORKSPACE
|
||||
|
||||
cp go.mod go.mod.orig
|
||||
cp go.sum go.sum.orig
|
||||
|
||||
34
.github/workflows/changelog.yml
vendored
34
.github/workflows/changelog.yml
vendored
@@ -1,34 +0,0 @@
|
||||
# This workflow will build a golang project
|
||||
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-go
|
||||
|
||||
name: changelog
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [ "develop" ]
|
||||
|
||||
jobs:
|
||||
run-changelog-check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
|
||||
|
||||
- name: Download unclog binary
|
||||
uses: dsaltares/fetch-gh-release-asset@aa2ab1243d6e0d5b405b973c89fa4d06a2d0fff7 # 1.1.2
|
||||
with:
|
||||
repo: OffchainLabs/unclog
|
||||
version: "tags/v0.1.3"
|
||||
file: "unclog"
|
||||
|
||||
- name: Get new changelog files
|
||||
id: new-changelog-files
|
||||
uses: OffchainLabs/gh-action-changed-files@9200e69727eb73eb060652b19946b8a2fdfb654b # v4.0.8
|
||||
with:
|
||||
files: |
|
||||
changelog/**.md
|
||||
|
||||
- name: Run lint command
|
||||
env:
|
||||
ALL_ADDED_MARKDOWN: ${{ steps.new-changelog-files.outputs.added_files }}
|
||||
run: chmod +x unclog && ./unclog check -fragment-env=ALL_ADDED_MARKDOWN
|
||||
21
.github/workflows/clang-format.yml
vendored
21
.github/workflows/clang-format.yml
vendored
@@ -1,21 +0,0 @@
|
||||
name: Protobuf Format
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ '*' ]
|
||||
pull_request:
|
||||
branches: [ '*' ]
|
||||
merge_group:
|
||||
types: [checks_requested]
|
||||
|
||||
jobs:
|
||||
clang-format-checking:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Is this step failing for you?
|
||||
# Run: clang-format -i proto/**/*.proto
|
||||
# See: https://clang.llvm.org/docs/ClangFormat.html
|
||||
- uses: RafikFarhad/clang-format-github-action@v3
|
||||
with:
|
||||
sources: "proto/**/*.proto"
|
||||
41
.github/workflows/dappnode-release-trigger.yml
vendored
Normal file
41
.github/workflows/dappnode-release-trigger.yml
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
name: Update DAppNodePackages
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
dappnode-update-beacon-chain:
|
||||
name: Trigger a beacon-chain release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Get latest tag
|
||||
id: get_tag
|
||||
run: echo ::set-output name=TAG::${GITHUB_REF/refs\/tags\//}
|
||||
- name: Send dispatch event to DAppNodePackage-prysm-beacon-chain
|
||||
env:
|
||||
DISPATCH_REPO: dappnode/DAppNodePackage-prysm-beacon-chain
|
||||
run: |
|
||||
curl -v -X POST -u "${{ secrets.PAT_GITHUB }}" \
|
||||
-H "Accept: application/vnd.github.everest-preview+json" \
|
||||
-H "Content-Type: application/json" \
|
||||
--data '{"event_type":"new_release", "client_payload": { "tag":"${{ steps.get_tag.outputs.TAG }}"}}' \
|
||||
https://api.github.com/repos/$DISPATCH_REPO/dispatches
|
||||
|
||||
dappnode-update-validator:
|
||||
name: Trigger a validator release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Get latest tag
|
||||
id: get_tag
|
||||
run: echo ::set-output name=TAG::${GITHUB_REF/refs\/tags\//}
|
||||
- name: Send dispatch event to DAppNodePackage validator repository
|
||||
env:
|
||||
DISPATCH_REPO: dappnode/DAppNodePackage-prysm-validator
|
||||
run: |
|
||||
curl -v -X POST -u "${{ secrets.PAT_GITHUB }}" \
|
||||
-H "Accept: application/vnd.github.everest-preview+json" \
|
||||
-H "Content-Type: application/json" \
|
||||
--data '{"event_type":"new_release", "client_payload": { "tag":"${{ steps.get_tag.outputs.TAG }}"}}' \
|
||||
https://api.github.com/repos/$DISPATCH_REPO/dispatches
|
||||
45
.github/workflows/fuzz.yml
vendored
45
.github/workflows/fuzz.yml
vendored
@@ -1,45 +0,0 @@
|
||||
name: "fuzz"
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 12 * * *"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
list:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 180
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: '1.23.5'
|
||||
- id: list
|
||||
uses: shogo82148/actions-go-fuzz/list@v0
|
||||
with:
|
||||
tags: fuzz,develop
|
||||
outputs:
|
||||
fuzz-tests: ${{steps.list.outputs.fuzz-tests}}
|
||||
|
||||
fuzz:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 360
|
||||
needs: list
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{fromJson(needs.list.outputs.fuzz-tests)}}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: '1.23.5'
|
||||
- uses: shogo82148/actions-go-fuzz/run@v0
|
||||
with:
|
||||
packages: ${{ matrix.package }}
|
||||
fuzz-regexp: ${{ matrix.func }}
|
||||
fuzz-time: "20m"
|
||||
tags: fuzz,develop
|
||||
68
.github/workflows/go.yml
vendored
68
.github/workflows/go.yml
vendored
@@ -5,70 +5,54 @@ on:
|
||||
branches: [ master ]
|
||||
pull_request:
|
||||
branches: [ '*' ]
|
||||
merge_group:
|
||||
types: [checks_requested]
|
||||
|
||||
jobs:
|
||||
formatting:
|
||||
name: Formatting
|
||||
|
||||
check:
|
||||
name: Check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: Go mod tidy checker
|
||||
id: gomodtidy
|
||||
uses: ./.github/actions/gomodtidy
|
||||
|
||||
gosec:
|
||||
name: Gosec scan
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GO111MODULE: on
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Set up Go 1.24
|
||||
uses: actions/setup-go@v4
|
||||
- name: Gofmt checker
|
||||
id: gofmt
|
||||
uses: ./.github/actions/gofmt
|
||||
with:
|
||||
go-version: '1.24.0'
|
||||
- name: Run Gosec Security Scanner
|
||||
run: | # https://github.com/securego/gosec/issues/469
|
||||
export PATH=$PATH:$(go env GOPATH)/bin
|
||||
go install github.com/securego/gosec/v2/cmd/gosec@v2.22.1
|
||||
gosec -exclude-generated -exclude=G307,G115 -exclude-dir=crypto/bls/herumi ./...
|
||||
path: ./
|
||||
|
||||
lint:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Go 1.24
|
||||
uses: actions/setup-go@v4
|
||||
- name: GoImports checker
|
||||
id: goimports
|
||||
uses: Jerome1337/goimports-action@v1.0.2
|
||||
with:
|
||||
go-version: '1.24.0'
|
||||
id: go
|
||||
goimports-path: ./
|
||||
|
||||
- name: Gosec security scanner
|
||||
uses: securego/gosec@master
|
||||
with:
|
||||
args: '-exclude=G307 -exclude-dir=crypto/bls/herumi ./...'
|
||||
|
||||
- name: Golangci-lint
|
||||
uses: golangci/golangci-lint-action@v5
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: v1.64.5
|
||||
args: --config=.golangci.yml --out-${NO_FUTURE}format colored-line-number
|
||||
args: --print-issued-lines --sort-results --no-config --timeout=10m --disable-all -E deadcode -E errcheck -E gosimple --skip-files=validator/web/site_data.go --skip-dirs=proto
|
||||
|
||||
build:
|
||||
name: Build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set up Go 1.x
|
||||
uses: actions/setup-go@v4
|
||||
uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: '1.24.0'
|
||||
go-version: ^1.14
|
||||
id: go
|
||||
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Get dependencies
|
||||
run: |
|
||||
@@ -77,14 +61,6 @@ jobs:
|
||||
- name: Build
|
||||
# Use blst tag to allow go and bazel builds for blst.
|
||||
run: go build -v ./...
|
||||
env:
|
||||
CGO_CFLAGS: "-O2 -D__BLST_PORTABLE__"
|
||||
# fuzz leverage go tag based stubs at compile time.
|
||||
# Building and testing with these tags should be checked and enforced at pre-submit.
|
||||
- name: Test for fuzzing
|
||||
run: go test -tags=fuzz,develop ./... -test.run=^Fuzz
|
||||
env:
|
||||
CGO_CFLAGS: "-O2 -D__BLST_PORTABLE__"
|
||||
|
||||
# Tests run via Bazel for now...
|
||||
# - name: Test
|
||||
|
||||
22
.github/workflows/horusec.yaml
vendored
22
.github/workflows/horusec.yaml
vendored
@@ -1,22 +0,0 @@
|
||||
name: Horusec Security Scan
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Runs cron at 16.00 UTC on
|
||||
- cron: '0 0 * * SUN'
|
||||
|
||||
jobs:
|
||||
Horusec_Scan:
|
||||
name: horusec-Scan
|
||||
runs-on: ubuntu-latest
|
||||
if: github.ref == 'refs/heads/develop'
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v2
|
||||
with: # Required when commit authors is enabled
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Running Security Scan
|
||||
run: |
|
||||
curl -fsSL https://raw.githubusercontent.com/ZupIT/horusec/main/deployments/scripts/install.sh | bash -s latest
|
||||
horusec start -t="10000" -p="./" -e="true" -i="**/crypto/bls/herumi/**, **/**/*_test.go, **/third_party/afl/**, **/crypto/keystore/key.go"
|
||||
12
.gitignore
vendored
12
.gitignore
vendored
@@ -32,15 +32,3 @@ dist
|
||||
|
||||
# deepsource cli
|
||||
bin
|
||||
|
||||
# p2p metaData
|
||||
metaData
|
||||
|
||||
# execution API authentication
|
||||
jwt.hex
|
||||
|
||||
# manual testing
|
||||
tmp
|
||||
|
||||
# spectest coverage reports
|
||||
report.txt
|
||||
|
||||
145
.golangci.yml
145
.golangci.yml
@@ -1,90 +1,69 @@
|
||||
run:
|
||||
timeout: 10m
|
||||
go: '1.23.5'
|
||||
|
||||
issues:
|
||||
exclude-files:
|
||||
- validator/web/site_data.go
|
||||
- .*_test.go
|
||||
exclude-dirs:
|
||||
- proto
|
||||
- tools/analyzers
|
||||
linters-settings:
|
||||
govet:
|
||||
check-shadowing: true
|
||||
settings:
|
||||
printf:
|
||||
funcs:
|
||||
- (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof
|
||||
- (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf
|
||||
- (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf
|
||||
- (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf
|
||||
golint:
|
||||
min-confidence: 0
|
||||
gocyclo:
|
||||
min-complexity: 10
|
||||
maligned:
|
||||
suggest-new: true
|
||||
dupl:
|
||||
threshold: 100
|
||||
goconst:
|
||||
min-len: 2
|
||||
min-occurrences: 2
|
||||
depguard:
|
||||
list-type: blacklist
|
||||
packages:
|
||||
# logging is allowed only by logutils.Log, logrus
|
||||
# is allowed to use only in logutils package
|
||||
- github.com/sirupsen/logrus
|
||||
misspell:
|
||||
locale: US
|
||||
lll:
|
||||
line-length: 140
|
||||
goimports:
|
||||
local-prefixes: github.com/golangci/golangci-lint
|
||||
gocritic:
|
||||
enabled-tags:
|
||||
- performance
|
||||
- style
|
||||
- experimental
|
||||
disabled-checks:
|
||||
- wrapperFunc
|
||||
|
||||
linters:
|
||||
enable-all: true
|
||||
disable:
|
||||
# Deprecated linters:
|
||||
- govet
|
||||
|
||||
# Disabled for now:
|
||||
- asasalint
|
||||
- bodyclose
|
||||
- containedctx
|
||||
- contextcheck
|
||||
- cyclop
|
||||
- depguard
|
||||
- dogsled
|
||||
- dupl
|
||||
- durationcheck
|
||||
- errname
|
||||
- err113
|
||||
- exhaustive
|
||||
- exhaustruct
|
||||
- forbidigo
|
||||
- forcetypeassert
|
||||
- funlen
|
||||
- gci
|
||||
- gochecknoglobals
|
||||
- gochecknoinits
|
||||
enable:
|
||||
- deadcode
|
||||
- goconst
|
||||
- gocritic
|
||||
- gocyclo
|
||||
- godot
|
||||
- godox
|
||||
- gofumpt
|
||||
- gomoddirectives
|
||||
- goimports
|
||||
- golint
|
||||
- gosec
|
||||
- inamedparam
|
||||
- interfacebloat
|
||||
- intrange
|
||||
- ireturn
|
||||
- lll
|
||||
- maintidx
|
||||
- makezero
|
||||
- mnd
|
||||
- musttag
|
||||
- nakedret
|
||||
- nestif
|
||||
- nilnil
|
||||
- nlreturn
|
||||
- noctx
|
||||
- nolintlint
|
||||
- nonamedreturns
|
||||
- nosprintfhostport
|
||||
- perfsprint
|
||||
- prealloc
|
||||
- predeclared
|
||||
- promlinter
|
||||
- protogetter
|
||||
- recvcheck
|
||||
- revive
|
||||
- spancheck
|
||||
- staticcheck
|
||||
- stylecheck
|
||||
- tagalign
|
||||
- tagliatelle
|
||||
- thelper
|
||||
- misspell
|
||||
- structcheck
|
||||
- typecheck
|
||||
- unparam
|
||||
- usetesting
|
||||
- varnamelen
|
||||
- wrapcheck
|
||||
- wsl
|
||||
- varcheck
|
||||
- gofmt
|
||||
- unused
|
||||
disable-all: true
|
||||
|
||||
linters-settings:
|
||||
gocognit:
|
||||
# TODO: We should target for < 50
|
||||
min-complexity: 65
|
||||
run:
|
||||
skip-dirs:
|
||||
- proto/
|
||||
- ^contracts/
|
||||
deadline: 10m
|
||||
|
||||
output:
|
||||
print-issued-lines: true
|
||||
sort-results: true
|
||||
# golangci.com configuration
|
||||
# https://github.com/golangci/golangci/wiki/Configuration
|
||||
service:
|
||||
golangci-lint-version: 1.15.0 # use the fixed version to not introduce new linters unexpectedly
|
||||
prepare:
|
||||
- echo "here I can run custom commands, but no preparation needed for this repo"
|
||||
|
||||
@@ -26,6 +26,7 @@ approval_rules:
|
||||
only_changed_files:
|
||||
paths:
|
||||
- "*pb.go"
|
||||
- "*pb.gw.go"
|
||||
- "*.bazel"
|
||||
options:
|
||||
ignore_commits_by:
|
||||
@@ -68,6 +69,7 @@ approval_rules:
|
||||
changed_files:
|
||||
ignore:
|
||||
- "*pb.go"
|
||||
- "*pb.gw.go"
|
||||
- "*.bazel"
|
||||
options:
|
||||
ignore_commits_by:
|
||||
|
||||
218
BUILD.bazel
218
BUILD.bazel
@@ -3,8 +3,8 @@ load("@com_github_atlassian_bazel_tools//gometalinter:def.bzl", "gometalinter")
|
||||
load("@com_github_atlassian_bazel_tools//goimports:def.bzl", "goimports")
|
||||
load("@io_kubernetes_build//defs:run_in_workspace.bzl", "workspace_binary")
|
||||
load("@io_bazel_rules_go//go:def.bzl", "nogo")
|
||||
load("@vaticle_bazel_distribution//common:rules.bzl", "assemble_targz", "assemble_versioned")
|
||||
load("@bazel_skylib//rules:common_settings.bzl", "string_setting")
|
||||
load("@prysm//tools/nogo_config:def.bzl", "nogo_config_exclude")
|
||||
|
||||
prefix = "github.com/prysmaticlabs/prysm"
|
||||
|
||||
@@ -12,7 +12,7 @@ exports_files([
|
||||
"LICENSE.md",
|
||||
])
|
||||
|
||||
# gazelle:prefix github.com/prysmaticlabs/prysm/v5
|
||||
# gazelle:prefix github.com/prysmaticlabs/prysm
|
||||
# gazelle:map_kind go_library go_library @prysm//tools/go:def.bzl
|
||||
# gazelle:map_kind go_test go_test @prysm//tools/go:def.bzl
|
||||
# gazelle:map_kind go_repository go_repository @prysm//tools/go:def.bzl
|
||||
@@ -55,6 +55,13 @@ alias(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
# Protobuf gRPC gateway compiler
|
||||
alias(
|
||||
name = "grpc_gateway_proto_compiler",
|
||||
actual = "@com_github_grpc_ecosystem_grpc_gateway_v2//protoc-gen-grpc-gateway:go_gen_grpc_gateway",
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
gometalinter(
|
||||
name = "gometalinter",
|
||||
config = "//:.gometalinter.json",
|
||||
@@ -76,189 +83,59 @@ workspace_binary(
|
||||
cmd = "@com_github_golang_lint//golint",
|
||||
)
|
||||
|
||||
STATICCHECK_ANALYZERS = [
|
||||
# Enabled static checks. See https://staticcheck.dev/docs/checks/
|
||||
# Please. keep this list sorted. Don't be a bad person by inserting stuff randomly.
|
||||
"sa1000",
|
||||
"sa1001",
|
||||
"sa1002",
|
||||
"sa1003",
|
||||
"sa1004",
|
||||
"sa1005",
|
||||
"sa1006",
|
||||
"sa1007",
|
||||
"sa1008",
|
||||
"sa1010",
|
||||
"sa1011",
|
||||
"sa1012",
|
||||
"sa1013",
|
||||
"sa1014",
|
||||
"sa1015",
|
||||
"sa1016",
|
||||
"sa1017",
|
||||
"sa1018",
|
||||
# "sa1019", # TODO: Fix all uses of deprecated things.
|
||||
"sa1020",
|
||||
"sa1021",
|
||||
"sa1023",
|
||||
"sa1024",
|
||||
"sa1025",
|
||||
"sa1026",
|
||||
"sa1027",
|
||||
"sa1028",
|
||||
"sa1029",
|
||||
"sa1030",
|
||||
"sa2000",
|
||||
"sa2001",
|
||||
"sa2002",
|
||||
"sa2003",
|
||||
"sa3000",
|
||||
"sa3001",
|
||||
"sa4000",
|
||||
"sa4001",
|
||||
"sa4003",
|
||||
"sa4004",
|
||||
"sa4005",
|
||||
"sa4006",
|
||||
"sa4008",
|
||||
"sa4009",
|
||||
"sa4010",
|
||||
"sa4011",
|
||||
"sa4012",
|
||||
"sa4013",
|
||||
"sa4014",
|
||||
"sa4015",
|
||||
"sa4016",
|
||||
"sa4017",
|
||||
"sa4018",
|
||||
"sa4019",
|
||||
"sa4020",
|
||||
"sa4021",
|
||||
"sa4022",
|
||||
"sa4023",
|
||||
"sa4024",
|
||||
"sa4025",
|
||||
"sa4026",
|
||||
"sa4027",
|
||||
"sa4028",
|
||||
"sa4029",
|
||||
"sa4030",
|
||||
"sa4031",
|
||||
"sa4032",
|
||||
"sa5000",
|
||||
"sa5001",
|
||||
"sa5002",
|
||||
"sa5003",
|
||||
"sa5004",
|
||||
"sa5005",
|
||||
"sa5007",
|
||||
"sa5008",
|
||||
"sa5009",
|
||||
"sa5010",
|
||||
"sa5011",
|
||||
"sa5012",
|
||||
"sa6000",
|
||||
"sa6001",
|
||||
"sa6002",
|
||||
"sa6003",
|
||||
"sa6005",
|
||||
"sa6006",
|
||||
"sa9001",
|
||||
"sa9002",
|
||||
"sa9003",
|
||||
"sa9004",
|
||||
"sa9005",
|
||||
"sa9006",
|
||||
"sa9007",
|
||||
"sa9008",
|
||||
]
|
||||
|
||||
nogo_config_exclude(
|
||||
name = "nogo_config_with_excludes",
|
||||
checks = [sa.upper() for sa in STATICCHECK_ANALYZERS],
|
||||
exclude_files = [
|
||||
"external/.*",
|
||||
],
|
||||
input = "nogo_config.json",
|
||||
)
|
||||
|
||||
nogo(
|
||||
name = "nogo",
|
||||
config = ":nogo_config_with_excludes",
|
||||
config = "nogo_config.json",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//tools/analyzers/comparesame:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unsafeptr:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unreachable:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unmarshal:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/tests:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/structtag:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/stdmethods:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/shift:go_default_library",
|
||||
# "@org_golang_x_tools//go/analysis/passes/shadow:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/printf:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/pkgfact:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/nilness:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/nilfunc:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/loopclosure:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/httpresponse:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/findcall:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/deepequalerrors:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/ctrlflow:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/copylock:go_default_library",
|
||||
# "@org_golang_x_tools//go/analysis/passes/cgocall:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/buildtag:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/buildssa:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/bools:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/atomicalign:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/atomic:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/assign:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/inspect:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/asmdecl:go_default_library",
|
||||
"//tools/analyzers/maligned:go_default_library",
|
||||
"//tools/analyzers/cryptorand:go_default_library",
|
||||
"//tools/analyzers/errcheck:go_default_library",
|
||||
"//tools/analyzers/featureconfig:go_default_library",
|
||||
"//tools/analyzers/gocognit:go_default_library",
|
||||
"//tools/analyzers/ineffassign:go_default_library",
|
||||
"//tools/analyzers/interfacechecker:go_default_library",
|
||||
"//tools/analyzers/logruswitherror:go_default_library",
|
||||
"//tools/analyzers/maligned:go_default_library",
|
||||
"//tools/analyzers/comparesame:go_default_library",
|
||||
"//tools/analyzers/shadowpredecl:go_default_library",
|
||||
"//tools/analyzers/nop:go_default_library",
|
||||
"//tools/analyzers/nopanic:go_default_library",
|
||||
"//tools/analyzers/slicedirect:go_default_library",
|
||||
"//tools/analyzers/interfacechecker:go_default_library",
|
||||
"//tools/analyzers/ineffassign:go_default_library",
|
||||
"//tools/analyzers/properpermissions:go_default_library",
|
||||
"//tools/analyzers/recursivelock:go_default_library",
|
||||
"//tools/analyzers/shadowpredecl:go_default_library",
|
||||
"//tools/analyzers/slicedirect:go_default_library",
|
||||
"//tools/analyzers/uintcast:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/appends:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/asmdecl:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/assign:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/atomic:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/atomicalign:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/bools:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/buildssa:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/buildtag:go_default_library",
|
||||
# cgocall disabled
|
||||
#"@org_golang_x_tools//go/analysis/passes/cgocall:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/copylock:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/ctrlflow:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/deepequalerrors:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/defers:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/directive:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/errorsas:go_default_library",
|
||||
# fieldalignment disabled
|
||||
#"@org_golang_x_tools//go/analysis/passes/fieldalignment:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/findcall:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/framepointer:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/httpmux:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/httpresponse:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/ifaceassert:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/inspect:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/loopclosure:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/nilfunc:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/nilness:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/pkgfact:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/printf:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/reflectvaluecompare:go_default_library",
|
||||
# shadow disabled
|
||||
#"@org_golang_x_tools//go/analysis/passes/shadow:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/shift:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/sigchanyzer:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/slog:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/sortslice:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/stdmethods:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/stringintconv:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/structtag:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/testinggoroutine:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/tests:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/timeformat:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unmarshal:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unreachable:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unsafeptr:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unusedresult:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unusedwrite:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/usesgenerics:go_default_library",
|
||||
] + select({
|
||||
# nogo checks that fail with coverage enabled.
|
||||
":coverage_enabled": [],
|
||||
"//conditions:default": [
|
||||
"@org_golang_x_tools//go/analysis/passes/composite:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/lostcancel:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/composite:go_default_library",
|
||||
],
|
||||
}) + ["@co_honnef_go_tools//staticcheck/%s:go_default_library" % c for c in STATICCHECK_ANALYZERS],
|
||||
}),
|
||||
)
|
||||
|
||||
config_setting(
|
||||
@@ -266,11 +143,6 @@ config_setting(
|
||||
values = {"define": "coverage_enabled=1"},
|
||||
)
|
||||
|
||||
config_setting(
|
||||
name = "pgo_enabled",
|
||||
values = {"define": "pgo_enabled=1"},
|
||||
)
|
||||
|
||||
common_files = {
|
||||
"//:LICENSE.md": "LICENSE.md",
|
||||
"//:README.md": "README.md",
|
||||
|
||||
3258
CHANGELOG.md
3258
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
@@ -1,21 +1,18 @@
|
||||
# Contribution Guidelines
|
||||
|
||||
Note: The latest and most up-to-date documentation can be found on our [docs portal](https://docs.prylabs.network/docs/contribute/contribution-guidelines).
|
||||
Note: The latest and most up to date documenation can be found on our [docs portal](https://docs.prylabs.network/docs/contribute/contribution-guidelines).
|
||||
|
||||
Excited by our work and want to get involved in building out our sharding releases? Or maybe you haven't learned as much about the Ethereum protocol but are a savvy developer?
|
||||
|
||||
You can explore our [Open Issues](https://github.com/prysmaticlabs/prysm/issues) in-the works for our different releases. Feel free to fork our repo and start creating PR’s after assigning yourself to an issue of interest. We are always chatting on [Discord](https://discord.gg/CTYGPUJ) drop us a line there if you want to get more involved or have any questions on our implementation!
|
||||
|
||||
> [!IMPORTANT]
|
||||
> Please, **do not send pull requests for trivial changes**, such as typos, these will be rejected. These types of pull requests incur a cost to reviewers and do not provide much value to the project. If you are unsure, please open an issue first to discuss the change.
|
||||
|
||||
## Contribution Steps
|
||||
|
||||
**1. Set up Prysm following the instructions in README.md.**
|
||||
|
||||
**2. Fork the Prysm repo.**
|
||||
**2. Fork the prysm repo.**
|
||||
|
||||
Sign in to your GitHub account or create a new account if you do not have one already. Then navigate your browser to https://github.com/prysmaticlabs/prysm/. In the upper right hand corner of the page, click “fork”. This will create a copy of the Prysm repo in your account.
|
||||
Sign in to your Github account or create a new account if you do not have one already. Then navigate your browser to https://github.com/prysmaticlabs/prysm/. In the upper right hand corner of the page, click “fork”. This will create a copy of the Prysm repo in your account.
|
||||
|
||||
**3. Create a local clone of Prysm.**
|
||||
|
||||
@@ -26,7 +23,7 @@ $ git clone https://github.com/prysmaticlabs/prysm.git
|
||||
$ cd $GOPATH/src/github.com/prysmaticlabs/prysm
|
||||
```
|
||||
|
||||
**4. Link your local clone to the fork on your GitHub repo.**
|
||||
**4. Link your local clone to the fork on your Github repo.**
|
||||
|
||||
```
|
||||
$ git remote add myprysmrepo https://github.com/<your_github_user_name>/prysm.git
|
||||
@@ -71,7 +68,7 @@ $ go test <file_you_are_working_on>
|
||||
$ git add --all
|
||||
```
|
||||
|
||||
This command stages all the files that you have changed. You can add individual files by specifying the file name or names and eliminating the “-- all”.
|
||||
This command stages all of the files that you have changed. You can add individual files by specifying the file name or names and eliminating the “-- all”.
|
||||
|
||||
**11. Commit the file or files.**
|
||||
|
||||
@@ -99,7 +96,8 @@ If there are conflicts between your edits and those made by others since you sta
|
||||
$ git status
|
||||
```
|
||||
|
||||
Open those files one at a time, and you will see lines inserted by Git that identify the conflicts:
|
||||
Open those files one at a time and you
|
||||
will see lines inserted by Git that identify the conflicts:
|
||||
|
||||
```
|
||||
<<<<<< HEAD
|
||||
@@ -121,21 +119,17 @@ $ git push myrepo feature-in-progress-branch
|
||||
|
||||
**15. Check to be sure your fork of the Prysm repo contains your feature branch with the latest edits.**
|
||||
|
||||
Navigate to your fork of the repo on GitHub. On the upper left where the current branch is listed, change the branch to your feature-in-progress-branch. Open the files that you have worked on and check to make sure they include your changes.
|
||||
Navigate to your fork of the repo on Github. On the upper left where the current branch is listed, change the branch to your feature-in-progress-branch. Open the files that you have worked on and check to make sure they include your changes.
|
||||
|
||||
**16. Add an entry to CHANGELOG.md.**
|
||||
**16. Create a pull request.**
|
||||
|
||||
All PRs must must include a changelog fragment file in the `changelog` directory. If your change is not user-facing or should not be mentioned in the changelog for some other reason, you may use the `Ignored` changelog section in your fragment's header to satisfy this requirement without altering the final release changelog. See the [Maintaining CHANGELOG.md](#maintaining-changelogmd) section for more information.
|
||||
Navigate your browser to https://github.com/prysmaticlabs/prysm and click on the new pull request button. In the “base” box on the left, leave the default selection “base master”, the branch that you want your changes to be applied to. In the “compare” box on the right, select feature-in-progress-branch, the branch containing the changes you want to apply. You will then be asked to answer a few questions about your pull request. After you complete the questionnaire, the pull request will appear in the list of pull requests at https://github.com/prysmaticlabs/prysm/pulls.
|
||||
|
||||
**17. Create a pull request.**
|
||||
|
||||
Navigate your browser to https://github.com/prysmaticlabs/prysm and click on the new pull request button. In the “base” box on the left, leave the default selection “base develop”, the branch that you want your changes to be applied to. In the “compare” box on the right, select feature-in-progress-branch, the branch containing the changes you want to apply. You will then be asked to answer a few questions about your pull request. After you complete the questionnaire, the pull request will appear in the list of pull requests at https://github.com/prysmaticlabs/prysm/pulls. Ensure that you have added an entry to CHANGELOG.md if your PR is a user-facing change. See the [Maintaining CHANGELOG.md](#maintaining-changelogmd) section for more information.
|
||||
|
||||
**18. Respond to comments by Core Contributors.**
|
||||
**17. Respond to comments by Core Contributors.**
|
||||
|
||||
Core Contributors may ask questions and request that you make edits. If you set notifications at the top of the page to “not watching,” you will still be notified by email whenever someone comments on the page of a pull request you have created. If you are asked to modify your pull request, repeat steps 8 through 15, then leave a comment to notify the Core Contributors that the pull request is ready for further review.
|
||||
|
||||
**19. If the number of commits becomes excessive, you may be asked to squash your commits.**
|
||||
**18. If the number of commits becomes excessive, you may be asked to squash your commits.**
|
||||
|
||||
You can do this with an interactive rebase. Start by running the following command to determine the commit that is the base of your branch...
|
||||
|
||||
@@ -143,7 +137,7 @@ Core Contributors may ask questions and request that you make edits. If you set
|
||||
$ git merge-base feature-in-progress-branch prysm/master
|
||||
```
|
||||
|
||||
**20. The previous command will return a commit-hash that you should use in the following command.**
|
||||
**19. The previous command will return a commit-hash that you should use in the following command.**
|
||||
|
||||
```
|
||||
$ git rebase -i commit-hash
|
||||
@@ -157,7 +151,7 @@ pick hash fix a bug
|
||||
pick hash add a feature
|
||||
```
|
||||
|
||||
Replace the word pick with the word “squash” for every line but the first, so you end with ….
|
||||
Replace the word pick with the word “squash” for every line but the first so you end with ….
|
||||
|
||||
```
|
||||
pick hash do some work
|
||||
@@ -167,24 +161,13 @@ squash hash add a feature
|
||||
|
||||
Save and close the file, then a commit command will appear in the terminal that squashes the smaller commits into one. Check to be sure the commit message accurately reflects your changes and then hit enter to execute it.
|
||||
|
||||
**21. Update your pull request with the following command.**
|
||||
**20. Update your pull request with the following command.**
|
||||
|
||||
```
|
||||
$ git push myrepo feature-in-progress-branch -f
|
||||
```
|
||||
|
||||
**22. Finally, again leave a comment to the Core Contributors on the pull request to let them know that the pull request has been updated.**
|
||||
|
||||
## Maintaining CHANGELOG.md
|
||||
|
||||
This project follows the changelog guidelines from [keepachangelog.com](https://keepachangelog.com/en/1.1.0/). In order to minimize conflicts and workflow headaches, we chose to implement a changelog management
|
||||
strategy that uses changelog "fragment" files, managed by our changelog management tool called `unclog`. Each PR must include a new changelog fragment file in the `changelog` directory, as specified by unclog's
|
||||
[README.md](https://github.com/OffchainLabs/unclog?tab=readme-ov-file#what-is-a-changelog-fragment). As the `unclog` README suggests in the [Best Practices](https://github.com/OffchainLabs/unclog?tab=readme-ov-file#best-practices) section,
|
||||
the standard naming convention for your PR's fragment file, to avoid conflicting with another fragment file, is `changelog/<github user name>_<PR branch name>.md`.
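As an illustration only (the user name, branch name, and category header below are hypothetical; the exact format should follow the unclog README), a fragment file could be created like this:

```sh
# Hypothetical example; the file name follows changelog/<github user name>_<PR branch name>.md.
cat > changelog/janedoe_fix-pruning.md <<'EOF'
### Fixed

- Fixed a pruning edge case in the beacon node database.
EOF
```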
|
||||
|
||||
### Releasing
|
||||
|
||||
When a new release is made, the "Unreleased" section should be moved to a new section with the release version and the current date. Then a new "Unreleased" section is made at the top of the file with the categories listed above.
|
||||
**21. Finally, again leave a comment to the Core Contributors on the pull request to let them know that the pull request has been updated.**
|
||||
|
||||
## Contributor Responsibilities
|
||||
|
||||
@@ -195,7 +178,7 @@ We consider two types of contributions to our repo and categorize them as follow
|
||||
Anyone can become a part-time contributor and help out on implementing Ethereum consensus. The responsibilities of a part-time contributor include:
|
||||
|
||||
- Engaging in Gitter conversations and asking questions about how to begin contributing to the project
|
||||
- Opening up GitHub issues to express interest in code to implement
|
||||
- Opening up github issues to express interest in code to implement
|
||||
- Opening up PRs referencing any open issue in the repo. PRs should include:
|
||||
- Detailed context of what would be required for merge
|
||||
- Tests that are consistent with how other tests are written in our implementation
|
||||
@@ -205,12 +188,12 @@ Anyone can become a part-time contributor and help out on implementing Ethereum
|
||||
|
||||
### Core Contributors
|
||||
|
||||
Core contributors are remote contractors of Prysmatic Labs, LLC. and are considered critical team members of our organization. Core devs have all the responsibilities of part-time contributors plus the majority of the following:
|
||||
Core contributors are remote contractors of Prysmatic Labs, LLC. and are considered critical team members of our organization. Core devs have all of the responsibilities of part-time contributors plus the majority of the following:
|
||||
|
||||
- Stay up to date on the latest beacon chain specification
|
||||
- Monitor GitHub issues and PRs to make sure owners, labels, and descriptions are correct
|
||||
- Monitor github issues and PR’s to make sure owner, labels, descriptions are correct
|
||||
- Formulate independent ideas, suggest new work to do, point out improvements to existing approaches
|
||||
- Participate in code review, ensure code quality is excellent, and ensure high code coverage
|
||||
- Participate in code review, ensure code quality is excellent, and have ensure high code coverage
|
||||
- Help with social media presence, write bi-weekly development update
|
||||
- Represent Prysmatic Labs at events to help spread the word on scalability research and solutions
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Dependency Management in Prysm
|
||||
# Dependency Managagement in Prysm
|
||||
|
||||
Prysm is a Go project with many complicated dependencies, including some C++ based libraries. There
|
||||
are two parts to Prysm's dependency management: Go modules and Bazel-managed dependencies. Be sure
|
||||
@@ -28,7 +28,7 @@ including complicated c++ dependencies.
|
||||
One key advantage of Bazel over vanilla `go build` is that Bazel automatically (re)builds generated
|
||||
pb.go files at build time when file changes are present in any protobuf definition file or after
|
||||
any updates to the protobuf compiler or other relevant dependencies. Vanilla go users should run
|
||||
the following scripts often to ensure their generated files are up to date. Furthermore, Prysm
|
||||
the following scripts often to ensure their generated files are up to date. Further more, Prysm
|
||||
generates SSZ marshal related code based on defined data structures. These generated files must
|
||||
also be updated and checked in just as frequently.
|
||||
|
||||
@@ -55,7 +55,7 @@ bazel build //beacon-chain --config=release
|
||||
## Adding / updating dependencies
|
||||
|
||||
1. Add your dependency as you would with Go modules, i.e. `go get ...`
|
||||
1. Run `bazel run //:gazelle -- update-repos -from_file=go.mod -to_macro=deps.bzl%prysm_deps -prune=true` to update the bazel managed dependencies.
|
||||
1. Run `gazelle update-repos -from_file=go.mod` to update the bazel managed dependencies.
|
||||
|
||||
Example:
|
||||
|
||||
|
||||
106
INTEROP.md
106
INTEROP.md
@@ -2,21 +2,18 @@
|
||||
|
||||
This README details how to set up Prysm for interop testing with other Ethereum consensus clients.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> This guide is likely to be outdated. The Prysm team does not have capacity to troubleshoot
|
||||
> outdated interop guides or instructions. If you experience issues with this guide, please file an
|
||||
> issue for visibility and propose fixes, if possible.
|
||||
|
||||
## Installation & Setup
|
||||
|
||||
1. Install [Bazel](https://docs.bazel.build/versions/master/install.html) **(Recommended)**
|
||||
2. `git clone https://github.com/prysmaticlabs/prysm && cd prysm`
|
||||
3. `bazel build //cmd/...`
|
||||
3. `bazel build //...`
|
||||
|
||||
## Starting from Genesis
|
||||
|
||||
Prysm can be started from a built-in mainnet genesis state, or started with a provided genesis state by
|
||||
using the `--genesis-state` flag and providing a path to the genesis.ssz file.
|
||||
Prysm supports a few ways to quickly launch a beacon node from basic configurations:
|
||||
|
||||
- `NumValidators + GenesisTime`: Launches a beacon node by deterministically generating a state from a num-validators flag along with a genesis time **(Recommended)**
|
||||
- `SSZ Genesis`: Launches a beacon node from a .ssz file containing a SSZ-encoded, genesis beacon state
|
||||
|
||||
## Generating a Genesis State
|
||||
|
||||
@@ -24,34 +21,21 @@ To setup the necessary files for these quick starts, Prysm provides a tool to ge
|
||||
a deterministically generated set of validator private keys following the official interop YAML format
|
||||
[here](https://github.com/ethereum/eth2.0-pm/blob/master/interop/mocked_start).
|
||||
|
||||
You can use `prysmctl` to create a deterministic genesis state for interop.
|
||||
You can use `bazel run //tools/genesis-state-gen` to create a deterministic genesis state for interop.
|
||||
|
||||
```sh
|
||||
# Download (or create) a chain config file.
|
||||
curl https://raw.githubusercontent.com/ethereum/consensus-specs/refs/heads/dev/configs/minimal.yaml -o /tmp/minimal.yaml
|
||||
### Usage
|
||||
|
||||
- **--genesis-time** uint: Unix timestamp used as the genesis time in the generated genesis state (defaults to now)
|
||||
- **--mainnet-config** bool: Select whether genesis state should be generated with mainnet or minimal (default) params
|
||||
- **--num-validators** int: Number of validators to deterministically include in the generated genesis state
|
||||
- **--output-ssz** string: Output filename of the SSZ marshaling of the generated genesis state
|
||||
|
||||
The example below creates 64 validator keys, instantiates a genesis state with those 64 validators and with genesis unix timestamp 1567542540,
|
||||
and finally writes a ssz encoded output to ~/Desktop/genesis.ssz. This file can be used to kickstart the beacon chain in the next section.
|
||||
|
||||
# Run prysmctl to generate genesis with a 2 minute genesis delay and 256 validators.
|
||||
bazel run //cmd/prysmctl --config=minimal -- \
|
||||
testnet generate-genesis \
|
||||
--genesis-time-delay=120 \
|
||||
--num-validators=256 \
|
||||
--output-ssz=/tmp/genesis.ssz \
|
||||
--chain-config-file=/tmp/minimal.yaml
|
||||
```
|
||||
|
||||
The flags are explained below:
|
||||
- `bazel run //cmd/prysmctl` is the bazel command to compile and run prysmctl.
|
||||
- `--config=minimal` is a bazel build time configuration flag to compile Prysm with minimal state constants.
|
||||
- `--` is an argument divider that tells bazel everything after it should be passed as arguments to prysmctl. Without this divider, bazel cannot tell whether the arguments are build-time or runtime arguments, so it complains and fails to build.
|
||||
- `testnet` is the primary command argument for prysmctl.
|
||||
- `generate-genesis` is the subcommand to `testnet` in prysmctl.
|
||||
- `--genesis-time-delay` uint: The number of seconds in the future to define genesis. Example: a value of 60 will set the genesis time to 1 minute in the future. This should be large enough to allow you to start the beacon node before the genesis time.
|
||||
- `--num-validators` int: Number of validators to deterministically include in the generated genesis state
|
||||
- `--output-ssz` string: Output filename of the SSZ marshaling of the generated genesis state
|
||||
- `--chain-config-file` string: Filepath to a chain config yaml file.
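After running the command above, a trivial check that the state was actually written, using the paths from this guide:

```sh
# The file should exist and be non-empty if generation succeeded.
ls -lh /tmp/genesis.ssz
```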
|
||||
|
||||
Note: This guide saves items to the `/tmp/` directory which will not persist if your machine is
|
||||
restarted. Consider tweaking the arguments if persistence is needed.
|
||||
bazel run //tools/genesis-state-gen -- --output-ssz ~/Desktop/genesis.ssz --num-validators 64 --genesis-time 1567542540
|
||||
```
|
||||
|
||||
## Launching a Beacon Node + Validator Client
|
||||
|
||||
@@ -60,33 +44,41 @@ restarted. Consider tweaking the arguments if persistence is needed.
|
||||
Open two terminal windows and run:
|
||||
|
||||
```
|
||||
bazel run //cmd/beacon-chain --config=minimal -- \
|
||||
--minimal-config \
|
||||
--bootstrap-node= \
|
||||
--deposit-contract 0x8A04d14125D0FDCDc742F4A05C051De07232EDa4 \
|
||||
--datadir=/tmp/beacon-chain-minimal-devnet \
|
||||
--force-clear-db \
|
||||
--min-sync-peers=0 \
|
||||
--genesis-state=/tmp/genesis.ssz \
|
||||
--chain-config-file=/tmp/minimal.yaml
|
||||
bazel run //beacon-chain -- \
|
||||
--bootstrap-node= \
|
||||
--deposit-contract $(curl -s https://prylabs.net/contract) \
|
||||
--force-clear-db \
|
||||
--interop-num-validators 64 \
|
||||
--interop-eth1data-votes
|
||||
```
|
||||
|
||||
This will start the system with 256 validators. The flags used are explained as follows:
|
||||
|
||||
- `bazel run //cmd/beacon-chain --config=minimal` builds and runs the beacon node in minimal build configuration.
|
||||
- `--` is a flag divider to distinguish between bazel flags and flags that should be passed to the application. All flags and arguments after this divider are passed to the beacon chain.
|
||||
- `--minimal-config` tells the beacon node to use minimal network configuration. This is different from the compile time state configuration flag `--config=minimal` and both are required.
|
||||
- `--bootstrap-node=` disables the default bootstrap nodes. This prevents the client from attempting to peer with mainnet nodes.
|
||||
- `--datadir=/tmp/beacon-chain-minimal-devnet` sets the data directory in a temporary location. Change this to your preferred destination.
|
||||
- `--force-clear-db` will delete the beaconchain.db file without confirming with the user. This is helpful for iteratively running local devnets without changing the datadir, but less helpful for one off runs where there was no database in the data directory.
|
||||
- `--min-sync-peers=0` allows the beacon node to skip initial sync without peers. This is essential because Prysm expects at least a few peers to start the blockchain.
|
||||
- `--genesis-state=/tmp/genesis.ssz` defines the path to the generated genesis ssz file. The beacon node will use this as the initial genesis state.
|
||||
- `--chain-config-file=/tmp/minimal.yaml` defines the path to the yaml file with the chain configuration.
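Once the beacon node is up, you can optionally confirm that it is serving requests by querying the standard Beacon API. This sketch assumes the default REST gateway address of localhost:3500; adjust the port if your node exposes the gateway elsewhere.

```sh
# Assumes the default HTTP gateway on localhost:3500.
curl -s http://localhost:3500/eth/v1/node/version
```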
|
||||
|
||||
As soon as the beacon node has started, start the validator in the other terminal window.
|
||||
This will deterministically generate a beacon genesis state and start
|
||||
the system with 64 validators and the genesis time set to the current unix timestamp.
|
||||
Wait a bit until your beacon chain starts, and in the other window:
|
||||
|
||||
```
|
||||
bazel run //cmd/validator --config=minimal -- --datadir=/tmp/validator --interop-num-validators=256 --minimal-config --suggested-fee-recipient=0x8A04d14125D0FDCDc742F4A05C051De07232EDa4
|
||||
bazel run //validator -- --keymanager=interop --keymanageropts='{"keys":64}'
|
||||
```
|
||||
|
||||
This will launch and kickstart the system with your 256 validators performing their duties accordingly.
|
||||
This will launch and kickstart the system with your 64 validators performing their duties accordingly.
|
||||
|
||||
### Launching from `genesis.ssz`
|
||||
|
||||
Assuming you generated a `genesis.ssz` file with 64 validators, open up two terminal windows, run:
|
||||
|
||||
```
|
||||
bazel run //beacon-chain -- \
|
||||
--bootstrap-node= \
|
||||
--deposit-contract $(curl -s https://prylabs.net/contract) \
|
||||
--force-clear-db \
|
||||
--interop-genesis-state /path/to/genesis.ssz \
|
||||
--interop-eth1data-votes
|
||||
```
|
||||
|
||||
Wait a bit until your beacon chain starts, and in the other window:
|
||||
|
||||
```
|
||||
bazel run //validator -- --keymanager=interop --keymanageropts='{"keys":64}'
|
||||
```
|
||||
|
||||
This will launch and kickstart the system with your 64 validators performing their duties accordingly.
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
###############################################################################
|
||||
# Bazel now uses Bzlmod by default to manage external dependencies.
|
||||
# Please consider migrating your external dependencies from WORKSPACE to MODULE.bazel.
|
||||
#
|
||||
# For more details, please check https://github.com/bazelbuild/bazel/issues/18958
|
||||
###############################################################################
|
||||
110
MODULE.bazel.lock
generated
110
MODULE.bazel.lock
generated
@@ -1,110 +0,0 @@
|
||||
{
|
||||
"lockFileVersion": 11,
|
||||
"registryFileHashes": {
|
||||
"https://bcr.bazel.build/bazel_registry.json": "8a28e4aff06ee60aed2a8c281907fb8bcbf3b753c91fb5a5c57da3215d5b3497",
|
||||
"https://bcr.bazel.build/modules/abseil-cpp/20210324.2/MODULE.bazel": "7cd0312e064fde87c8d1cd79ba06c876bd23630c83466e9500321be55c96ace2",
|
||||
"https://bcr.bazel.build/modules/abseil-cpp/20211102.0/MODULE.bazel": "70390338f7a5106231d20620712f7cccb659cd0e9d073d1991c038eb9fc57589",
|
||||
"https://bcr.bazel.build/modules/abseil-cpp/20211102.0/source.json": "7e3a9adf473e9af076ae485ed649d5641ad50ec5c11718103f34de03170d94ad",
|
||||
"https://bcr.bazel.build/modules/apple_support/1.5.0/MODULE.bazel": "50341a62efbc483e8a2a6aec30994a58749bd7b885e18dd96aa8c33031e558ef",
|
||||
"https://bcr.bazel.build/modules/apple_support/1.5.0/source.json": "eb98a7627c0bc486b57f598ad8da50f6625d974c8f723e9ea71bd39f709c9862",
|
||||
"https://bcr.bazel.build/modules/bazel_features/1.11.0/MODULE.bazel": "f9382337dd5a474c3b7d334c2f83e50b6eaedc284253334cf823044a26de03e8",
|
||||
"https://bcr.bazel.build/modules/bazel_features/1.11.0/source.json": "c9320aa53cd1c441d24bd6b716da087ad7e4ff0d9742a9884587596edfe53015",
|
||||
"https://bcr.bazel.build/modules/bazel_skylib/1.0.3/MODULE.bazel": "bcb0fd896384802d1ad283b4e4eb4d718eebd8cb820b0a2c3a347fb971afd9d8",
|
||||
"https://bcr.bazel.build/modules/bazel_skylib/1.2.1/MODULE.bazel": "f35baf9da0efe45fa3da1696ae906eea3d615ad41e2e3def4aeb4e8bc0ef9a7a",
|
||||
"https://bcr.bazel.build/modules/bazel_skylib/1.3.0/MODULE.bazel": "20228b92868bf5cfc41bda7afc8a8ba2a543201851de39d990ec957b513579c5",
|
||||
"https://bcr.bazel.build/modules/bazel_skylib/1.6.1/MODULE.bazel": "8fdee2dbaace6c252131c00e1de4b165dc65af02ea278476187765e1a617b917",
|
||||
"https://bcr.bazel.build/modules/bazel_skylib/1.6.1/source.json": "082ed5f9837901fada8c68c2f3ddc958bb22b6d654f71dd73f3df30d45d4b749",
|
||||
"https://bcr.bazel.build/modules/buildozer/7.1.2/MODULE.bazel": "2e8dd40ede9c454042645fd8d8d0cd1527966aa5c919de86661e62953cd73d84",
|
||||
"https://bcr.bazel.build/modules/buildozer/7.1.2/source.json": "c9028a501d2db85793a6996205c8de120944f50a0d570438fcae0457a5f9d1f8",
|
||||
"https://bcr.bazel.build/modules/googletest/1.11.0/MODULE.bazel": "3a83f095183f66345ca86aa13c58b59f9f94a2f81999c093d4eeaa2d262d12f4",
|
||||
"https://bcr.bazel.build/modules/googletest/1.11.0/source.json": "c73d9ef4268c91bd0c1cd88f1f9dfa08e814b1dbe89b5f594a9f08ba0244d206",
|
||||
"https://bcr.bazel.build/modules/platforms/0.0.4/MODULE.bazel": "9b328e31ee156f53f3c416a64f8491f7eb731742655a47c9eec4703a71644aee",
|
||||
"https://bcr.bazel.build/modules/platforms/0.0.5/MODULE.bazel": "5733b54ea419d5eaf7997054bb55f6a1d0b5ff8aedf0176fef9eea44f3acda37",
|
||||
"https://bcr.bazel.build/modules/platforms/0.0.6/MODULE.bazel": "ad6eeef431dc52aefd2d77ed20a4b353f8ebf0f4ecdd26a807d2da5aa8cd0615",
|
||||
"https://bcr.bazel.build/modules/platforms/0.0.7/MODULE.bazel": "72fd4a0ede9ee5c021f6a8dd92b503e089f46c227ba2813ff183b71616034814",
|
||||
"https://bcr.bazel.build/modules/platforms/0.0.9/MODULE.bazel": "4a87a60c927b56ddd67db50c89acaa62f4ce2a1d2149ccb63ffd871d5ce29ebc",
|
||||
"https://bcr.bazel.build/modules/platforms/0.0.9/source.json": "cd74d854bf16a9e002fb2ca7b1a421f4403cda29f824a765acd3a8c56f8d43e6",
|
||||
"https://bcr.bazel.build/modules/protobuf/21.7/MODULE.bazel": "a5a29bb89544f9b97edce05642fac225a808b5b7be74038ea3640fae2f8e66a7",
|
||||
"https://bcr.bazel.build/modules/protobuf/21.7/source.json": "bbe500720421e582ff2d18b0802464205138c06056f443184de39fbb8187b09b",
|
||||
"https://bcr.bazel.build/modules/protobuf/3.19.0/MODULE.bazel": "6b5fbb433f760a99a22b18b6850ed5784ef0e9928a72668b66e4d7ccd47db9b0",
|
||||
"https://bcr.bazel.build/modules/protobuf/3.19.6/MODULE.bazel": "9233edc5e1f2ee276a60de3eaa47ac4132302ef9643238f23128fea53ea12858",
|
||||
"https://bcr.bazel.build/modules/rules_cc/0.0.1/MODULE.bazel": "cb2aa0747f84c6c3a78dad4e2049c154f08ab9d166b1273835a8174940365647",
|
||||
"https://bcr.bazel.build/modules/rules_cc/0.0.2/MODULE.bazel": "6915987c90970493ab97393024c156ea8fb9f3bea953b2f3ec05c34f19b5695c",
|
||||
"https://bcr.bazel.build/modules/rules_cc/0.0.8/MODULE.bazel": "964c85c82cfeb6f3855e6a07054fdb159aced38e99a5eecf7bce9d53990afa3e",
|
||||
"https://bcr.bazel.build/modules/rules_cc/0.0.9/MODULE.bazel": "836e76439f354b89afe6a911a7adf59a6b2518fafb174483ad78a2a2fde7b1c5",
|
||||
"https://bcr.bazel.build/modules/rules_cc/0.0.9/source.json": "1f1ba6fea244b616de4a554a0f4983c91a9301640c8fe0dd1d410254115c8430",
|
||||
"https://bcr.bazel.build/modules/rules_java/4.0.0/MODULE.bazel": "5a78a7ae82cd1a33cef56dc578c7d2a46ed0dca12643ee45edbb8417899e6f74",
|
||||
"https://bcr.bazel.build/modules/rules_java/7.6.5/MODULE.bazel": "481164be5e02e4cab6e77a36927683263be56b7e36fef918b458d7a8a1ebadb1",
|
||||
"https://bcr.bazel.build/modules/rules_java/7.6.5/source.json": "a805b889531d1690e3c72a7a7e47a870d00323186a9904b36af83aa3d053ee8d",
|
||||
"https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/MODULE.bazel": "a56b85e418c83eb1839819f0b515c431010160383306d13ec21959ac412d2fe7",
|
||||
"https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/source.json": "a075731e1b46bc8425098512d038d416e966ab19684a10a34f4741295642fc35",
|
||||
"https://bcr.bazel.build/modules/rules_license/0.0.3/MODULE.bazel": "627e9ab0247f7d1e05736b59dbb1b6871373de5ad31c3011880b4133cafd4bd0",
|
||||
"https://bcr.bazel.build/modules/rules_license/0.0.7/MODULE.bazel": "088fbeb0b6a419005b89cf93fe62d9517c0a2b8bb56af3244af65ecfe37e7d5d",
|
||||
"https://bcr.bazel.build/modules/rules_license/0.0.7/source.json": "355cc5737a0f294e560d52b1b7a6492d4fff2caf0bef1a315df5a298fca2d34a",
|
||||
"https://bcr.bazel.build/modules/rules_pkg/0.7.0/MODULE.bazel": "df99f03fc7934a4737122518bb87e667e62d780b610910f0447665a7e2be62dc",
|
||||
"https://bcr.bazel.build/modules/rules_pkg/0.7.0/source.json": "c2557066e0c0342223ba592510ad3d812d4963b9024831f7f66fd0584dd8c66c",
|
||||
"https://bcr.bazel.build/modules/rules_proto/4.0.0/MODULE.bazel": "a7a7b6ce9bee418c1a760b3d84f83a299ad6952f9903c67f19e4edd964894e06",
|
||||
"https://bcr.bazel.build/modules/rules_proto/5.3.0-21.7/MODULE.bazel": "e8dff86b0971688790ae75528fe1813f71809b5afd57facb44dad9e8eca631b7",
|
||||
"https://bcr.bazel.build/modules/rules_proto/5.3.0-21.7/source.json": "d57902c052424dfda0e71646cb12668d39c4620ee0544294d9d941e7d12bc3a9",
|
||||
"https://bcr.bazel.build/modules/rules_python/0.10.2/MODULE.bazel": "cc82bc96f2997baa545ab3ce73f196d040ffb8756fd2d66125a530031cd90e5f",
|
||||
"https://bcr.bazel.build/modules/rules_python/0.22.1/MODULE.bazel": "26114f0c0b5e93018c0c066d6673f1a2c3737c7e90af95eff30cfee38d0bbac7",
|
||||
"https://bcr.bazel.build/modules/rules_python/0.22.1/source.json": "57226905e783bae7c37c2dd662be078728e48fa28ee4324a7eabcafb5a43d014",
|
||||
"https://bcr.bazel.build/modules/rules_python/0.4.0/MODULE.bazel": "9208ee05fd48bf09ac60ed269791cf17fb343db56c8226a720fbb1cdf467166c",
|
||||
"https://bcr.bazel.build/modules/stardoc/0.5.1/MODULE.bazel": "1a05d92974d0c122f5ccf09291442580317cdd859f07a8655f1db9a60374f9f8",
|
||||
"https://bcr.bazel.build/modules/stardoc/0.5.1/source.json": "a96f95e02123320aa015b956f29c00cb818fa891ef823d55148e1a362caacf29",
|
||||
"https://bcr.bazel.build/modules/upb/0.0.0-20220923-a547704/MODULE.bazel": "7298990c00040a0e2f121f6c32544bab27d4452f80d9ce51349b1a28f3005c43",
|
||||
"https://bcr.bazel.build/modules/upb/0.0.0-20220923-a547704/source.json": "f1ef7d3f9e0e26d4b23d1c39b5f5de71f584dd7d1b4ef83d9bbba6ec7a6a6459",
|
||||
"https://bcr.bazel.build/modules/zlib/1.2.11/MODULE.bazel": "07b389abc85fdbca459b69e2ec656ae5622873af3f845e1c9d80fe179f3effa0",
|
||||
"https://bcr.bazel.build/modules/zlib/1.2.12/MODULE.bazel": "3b1a8834ada2a883674be8cbd36ede1b6ec481477ada359cd2d3ddc562340b27",
|
||||
"https://bcr.bazel.build/modules/zlib/1.3.1.bcr.3/MODULE.bazel": "af322bc08976524477c79d1e45e241b6efbeb918c497e8840b8ab116802dda79",
|
||||
"https://bcr.bazel.build/modules/zlib/1.3.1.bcr.3/source.json": "2be409ac3c7601245958cd4fcdff4288be79ed23bd690b4b951f500d54ee6e7d"
|
||||
},
|
||||
"selectedYankedVersions": {},
|
||||
"moduleExtensions": {
|
||||
"@@apple_support~//crosstool:setup.bzl%apple_cc_configure_extension": {
|
||||
"general": {
|
||||
"bzlTransitiveDigest": "PjIds3feoYE8SGbbIq2SFTZy3zmxeO2tQevJZNDo7iY=",
|
||||
"usagesDigest": "+hz7IHWN6A1oVJJWNDB6yZRG+RYhF76wAYItpAeIUIg=",
|
||||
"recordedFileInputs": {},
|
||||
"recordedDirentsInputs": {},
|
||||
"envVariables": {},
|
||||
"generatedRepoSpecs": {
|
||||
"local_config_apple_cc_toolchains": {
|
||||
"bzlFile": "@@apple_support~//crosstool:setup.bzl",
|
||||
"ruleClassName": "_apple_cc_autoconf_toolchains",
|
||||
"attributes": {}
|
||||
},
|
||||
"local_config_apple_cc": {
|
||||
"bzlFile": "@@apple_support~//crosstool:setup.bzl",
|
||||
"ruleClassName": "_apple_cc_autoconf",
|
||||
"attributes": {}
|
||||
}
|
||||
},
|
||||
"recordedRepoMappingEntries": [
|
||||
[
|
||||
"apple_support~",
|
||||
"bazel_tools",
|
||||
"bazel_tools"
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"@@platforms//host:extension.bzl%host_platform": {
|
||||
"general": {
|
||||
"bzlTransitiveDigest": "xelQcPZH8+tmuOHVjL9vDxMnnQNMlwj0SlvgoqBkm4U=",
|
||||
"usagesDigest": "pCYpDQmqMbmiiPI1p2Kd3VLm5T48rRAht5WdW0X2GlA=",
|
||||
"recordedFileInputs": {},
|
||||
"recordedDirentsInputs": {},
|
||||
"envVariables": {},
|
||||
"generatedRepoSpecs": {
|
||||
"host_platform": {
|
||||
"bzlFile": "@@platforms//host:extension.bzl",
|
||||
"ruleClassName": "host_platform_repo",
|
||||
"attributes": {}
|
||||
}
|
||||
},
|
||||
"recordedRepoMappingEntries": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
69
README.md
69
README.md
@@ -1,68 +1,35 @@
|
||||
<h1 align="left">Prysm: An Ethereum Consensus Implementation Written in Go</h1>
|
||||
# Prysm: An Ethereum Consensus Implementation Written in Go
|
||||
|
||||
<div align="left">
|
||||
|
||||
[](https://buildkite.com/prysmatic-labs/prysm)
|
||||
[](https://goreportcard.com/report/github.com/prysmaticlabs/prysm)
|
||||
[](https://github.com/ethereum/consensus-specs/tree/v1.4.0)
|
||||
[](https://github.com/ethereum/execution-apis/tree/v1.0.0-beta.2/src/engine)
|
||||
[](https://discord.gg/prysmaticlabs)
|
||||
[](https://www.gitpoap.io/gh/prysmaticlabs/prysm)
|
||||
[](https://github.com/ethereum/consensus-specs/tree/v1.1.8)
|
||||
[](https://discord.gg/CTYGPUJ)
|
||||
|
||||
</div>
|
||||
This is the core repository for Prysm, a [Golang](https://golang.org/) implementation of the [Ethereum Consensus](https://ethereum.org/en/eth2/) specification, developed by [Prysmatic Labs](https://prysmaticlabs.com). See the [Changelog](https://github.com/prysmaticlabs/prysm/releases) for details of the latest releases and upcoming breaking changes.
|
||||
|
||||
---
|
||||
### Getting Started
|
||||
|
||||
## 📖 Overview
|
||||
A detailed set of installation and usage instructions as well as breakdowns of each individual component are available in the [official documentation portal](https://docs.prylabs.network). If you still have questions, feel free to stop by our [Discord](https://discord.gg/CTYGPUJ).
|
||||
|
||||
This is the core repository for Prysm, a [Golang](https://golang.org/) implementation of the [Ethereum Consensus](https://ethereum.org/en/developers/docs/consensus-mechanisms/#proof-of-stake) [specification](https://github.com/ethereum/consensus-specs), developed by [Offchain Labs](https://www.offchainlabs.com).
|
||||
### Staking on Mainnet
|
||||
|
||||
See the [Changelog](https://github.com/prysmaticlabs/prysm/releases) for details of the latest releases and upcoming breaking changes.
|
||||
To participate in staking, you can join the [official eth2 launchpad](https://launchpad.ethereum.org). The launchpad is the only recommended way to become a validator on mainnet. You can explore validator rewards/penalties via Bitfly's block explorer: [beaconcha.in](https://beaconcha.in), and follow the latest blocks added to the chain on [beaconscan](https://beaconscan.com).
|
||||
|
||||
---
|
||||
|
||||
## 🚀 Getting Started
|
||||
|
||||
A detailed set of installation and usage instructions as well as breakdowns of each individual component are available in the **[official documentation portal](https://docs.prylabs.network)**.
|
||||
|
||||
💬 **Need help?** Join our **[Discord Community](https://discord.gg/prysmaticlabs)** for support.
|
||||
|
||||
---
|
||||
|
||||
## 🏆 Staking on Mainnet
|
||||
|
||||
To participate in staking, you can join the **[official Ethereum launchpad](https://launchpad.ethereum.org)**. The launchpad is the **only recommended** way to become a validator on mainnet.
|
||||
|
||||
🔍 Explore validator rewards/penalties:
|
||||
|
||||
- **[beaconcha.in](https://beaconcha.in)**
|
||||
- **[beaconscan](https://beaconscan.com)**
|
||||
|
||||
---
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
### 🔥 Branches
|
||||
|
||||
## Contributing
|
||||
### Branches
|
||||
Prysm maintains two permanent branches:
|
||||
|
||||
- **[`master`](https://github.com/prysmaticlabs/prysm/tree/master)** - This points to the latest stable release. It is ideal for most users.
|
||||
- **[`develop`](https://github.com/prysmaticlabs/prysm/tree/develop)** - This is used for development and contains the latest PRs. Developers should base their PRs on this branch.
|
||||
* [master](https://github.com/prysmaticlabs/prysm/tree/master): This points to the latest stable release. It is ideal for most users.
|
||||
* [develop](https://github.com/prysmaticlabs/prysm/tree/develop): This is used for development, it contains the latest PRs. Developers should base their PRs on this branch.
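A minimal sketch of starting new work from `develop` (the remote and branch names here are illustrative):

```sh
# Base new work on the develop branch; "my-feature" is just an example name.
git fetch origin
git checkout -b my-feature origin/develop
```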
|
||||
|
||||
### 🛠 Contribution Guide
|
||||
### Guide
|
||||
Want to get involved? Check out our [Contribution Guide](https://docs.prylabs.network/docs/contribute/contribution-guidelines/) to learn more!
|
||||
|
||||
Want to get involved? Check out our **[Contribution Guide](https://docs.prylabs.network/docs/contribute/contribution-guidelines/)** to learn more!
|
||||
## License
|
||||
|
||||
---
|
||||
[GNU General Public License v3.0](https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
|
||||
## 📜 License
|
||||
## Legal Disclaimer
|
||||
|
||||
[](https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
|
||||
This project is licensed under the **GNU General Public License v3.0**.
|
||||
|
||||
---
|
||||
|
||||
## ⚖️ Legal Disclaimer
|
||||
|
||||
📜 [Terms of Use](/TERMS_OF_SERVICE.md)
|
||||
[Terms of Use](/TERMS_OF_SERVICE.md)
|
||||
|
||||
@@ -1,53 +1,45 @@
|
||||
# Terms of Use
|
||||
Effective as of November 2, 2023
|
||||
## Terms of Use
|
||||
|
||||
By downloading, accessing or using the Prysm implementation (“Prysm”), you (referenced herein as “you” or the “user”) certify that you have read and agreed to the terms and conditions below (the “Terms”) which form a binding contract between you and Offchain Labs, Inc. (as successor in interest to Prysmatic Labs LLC) (referenced herein as “Offchain Labs”, “we” or “us”). If you do not agree to the Terms, do not download or use Prysm. Additionally, the Terms of Use available at https://arbitrum.io/tos (or any successor site, the “OCL Terms of Use”) are hereby incorporated by reference into these Terms. In the event of any conflict between provisions set forth herein and those set forth in the OCL Terms of Use, the provisions set forth herein shall control.
|
||||
Effective as of Oct 14, 2020
|
||||
|
||||
## About Prysm
|
||||
By downloading, accessing or using the Prysm implementation (“Prysm”), you (referenced herein as “you” or the “user”) certify that you have read and agreed to the terms and conditions below (the “Terms”) which form a binding contract between you and Prysmatic Labs (referenced herein as “we” or “us”). If you do not agree to the Terms, do not download or use Prysm.
|
||||
|
||||
Prysm is a client implementation for the Ethereum blockchain’s consensus protocol. To participate in the network, a user must send ETH from the Ethereum mainnet blockchain to a validator deposit smart contract on Ethereum mainnet. Validators participate in proposing and voting on blocks in the protocol, and the network applies rewards/penalties based on their behavior. A detailed set of installation and usage instructions as well as breakdowns of each individual component are available in the official documentation portal, however, we do not warrant the accuracy, completeness or usefulness of this documentation. Any reliance you place on such information is strictly at your own risk.
|
||||
### About Prysm
|
||||
Prysm is a client implementation for Ethereum consensus protocol for a proof-of-stake blockchain. To participate in the network, a user must send ETH from the Eth1.0 chain into a validator deposit contract, which will queue in the user as a validator in the system. Validators participate in proposing and voting on blocks in the protocol, and the network applies rewards/penalties based on their behavior. A detailed set of installation and usage instructions as well as breakdowns of each individual component are available in the official documentation portal, however, we do not warrant the accuracy, completeness or usefulness of this documentation. Any reliance you place on such information is strictly at your own risk.
|
||||
|
||||
## Licensing Terms
|
||||
Prysm is an open-source software program licensed pursuant to the GNU General Public License v3.0.
|
||||
The Offchain Labs name, the term “Prysm” and all related names, logos, product and service names, designs and slogans are trademarks of Offchain Labs or its affiliates and/or licensors. You must not use such marks without our prior written permission.
|
||||
PLEASE READ THESE TERMS CAREFULLY, AS THE OCL TERMS OF USE INCORPORATED BY REFERENCE HEREIN CONTAIN AN AGREEMENT TO ARBITRATE AND OTHER IMPORTANT INFORMATION REGARDING YOUR LEGAL RIGHTS, REMEDIES, AND OBLIGATIONS. THE AGREEMENT TO ARBITRATE REQUIRES (WITH LIMITED EXCEPTION) THAT YOU SUBMIT CLAIMS YOU HAVE AGAINST US TO BINDING AND FINAL ARBITRATION, AND FURTHER (1) YOU WILL ONLY BE PERMITTED TO PURSUE CLAIMS AGAINST OFFCHAIN LABS ON AN INDIVIDUAL BASIS, NOT AS A PLAINTIFF OR CLASS MEMBER IN ANY CLASS OR REPRESENTATIVE ACTION OR PROCEEDING, (2) YOU WILL ONLY BE PERMITTED TO SEEK RELIEF (INCLUDING MONETARY, INJUNCTIVE, AND DECLARATORY RELIEF) ON AN INDIVIDUAL BASIS, AND (3) YOU MAY NOT BE ABLE TO HAVE ANY CLAIMS YOU HAVE AGAINST US RESOLVED BY A JURY OR IN A COURT OF LAW.
|
||||
### Licensing Terms
|
||||
Prysm is a fully open-source software program licensed pursuant to the GNU General Public License v3.0.
|
||||
|
||||
## Risks of Operating Prysm
|
||||
The Prysmatic Labs name, the term “Prysm” and all related names, logos, product and service names, designs and slogans are trademarks of Prysmatic Labs or its affiliates and/or licensors. You must not use such marks without our prior written permission.
|
||||
|
||||
The use of Prysm and acting as a validator on the Ethereum network can lead to loss of money, tokens and value. Ethereum is still an experimental system and ETH remains a risky investment. You alone are responsible for your actions on Prysm, including the security of your ETH and meeting any applicable minimum system requirements.
|
||||
### Risks of Operating Prysm
|
||||
The use of Prysm and acting as a validator on the Ethereum network can lead to loss of money. Ethereum is still an experimental system and ETH remains a risky investment. You alone are responsible for your actions on Prysm including the security of your ETH and meeting any applicable minimum system requirements.
|
||||
|
||||
Use of Prysm and the ability to receive rewards or penalties may be affected at any time by mistakes made by the user or other users, software problems such as bugs, errors, incorrectly constructed transactions, unsafe cryptographic libraries or malware affecting the network, technical failures in the hardware of a user, security problems experienced by a user and/or actions or inactions of third parties and/or events experienced by third parties, among other risks. We cannot and do not guarantee that any user of Prysm will make money, that the Prysm network will operate in accordance with the documentation or that transactions will be effective or secure.
|
||||
|
||||
YOU ACKNOWLEDGE THAT WE ARE NOT RESPONSIBLE FOR ANY RISKS ASSOCIATED WITH YOUR USE OF PRYSM, AND CANNOT BE HELD LIABLE FOR ANY RESULTING LOSSES THAT YOU EXPERIENCE WHILE ACCESSING OR USING PRYSM.
|
||||
We make no claims that Prysm is appropriate or permitted for use in any specific jurisdiction. Access to Prysm may not be legal by certain persons or in certain jurisdictions or countries. If you access Prysm, you do so on your own initiative and are responsible for compliance with local laws.
|
||||
|
||||
BY ACCESSING AND USING PRYSM, YOU REPRESENT AND WARRANT THAT YOU UNDERSTAND THE INHERENT RISKS ASSOCIATED WITH USING CRYPTOGRAPHIC AND BLOCKCHAIN-BASED SYSTEMS, AND THAT YOU HAVE A WORKING KNOWLEDGE OF THE USAGE AND INTRICACIES OF DIGITAL ASSETS, SUCH AS THOSE FOLLOWING THE ETHEREUM TOKEN STANDARD (ERC-20). YOU FURTHER UNDERSTAND THAT THE MARKETS FOR DIGITAL ASSETS ARE HIGHLY VOLATILE DUE TO VARIOUS FACTORS, INCLUDING ADOPTION, SPECULATION, TECHNOLOGY, SECURITY, AND REGULATION. YOU ACKNOWLEDGE AND ACCEPT THAT THE COST AND SPEED OF TRANSACTING WITH CRYPTOGRAPHIC AND BLOCKCHAIN-BASED SYSTEMS SUCH AS ETHEREUM ARE VARIABLE AND MAY INCREASE DRAMATICALLY AT ANY TIME. YOU UNDERSTAND THAT ANYONE CAN CREATE A TOKEN, INCLUDING FAKE VERSIONS OF EXISTING TOKENS AND TOKENS THAT FALSELY CLAIM TO REPRESENT PROJECTS, AND ACKNOWLEDGE AND ACCEPT THE RISK THAT YOU MAY MISTAKENLY INTERACT WITH THOSE OR OTHER TOKENS. YOU FURTHER ACKNOWLEDGE THAT WE ARE NOT RESPONSIBLE FOR ANY OF THE VARIABLES OR RISKS DESCRIBED IN THESE TERMS. YOU UNDERSTAND AND AGREE TO ASSUME FULL RESPONSIBILITY FOR ALL OF THE RISKS OF ACCESSING AND USING PRYSM. YOU ARE SOLELY RESPONSIBLE FOR YOUR WALLETS, FOR SAFEGUARDING THE ASSOCIATED PRIVATE KEY AND FOR ANY ACTIVITY THAT OCCURS USING YOUR WALLET. WITHOUT LIMITING THE FOREGOING, YOU ALSO UNDERSTAND THAT THERE MAY BE TAX AND REGULATORY RISKS RELATED TO USING PRYSM. IT IS YOUR SOLE RESPONSIBILITY TO DETERMINE WHETHER, AND TO WHAT EXTENT, ANY TAXES APPLY TO ANY TRANSACTIONS YOU CONDUCT IN CONNECTION WITH YOUR USE OF PRYSM, AND TO WITHHOLD, COLLECT, REPORT AND REMIT THE CORRECT AMOUNTS OF TAXES TO THE APPROPRIATE TAX AUTHORITIES. DIGITAL ASSETS, BLOCKCHAIN TECHNOLOGY, AND ANY RELATED SOFTWARE AND SERVICES ARE ALSO SUBJECT TO LEGAL AND REGULATORY UNCERTAINTY IN THE UNITED STATES AND OTHER JURISDICTIONS. YOU UNDERSTAND THAT LEGISLATIVE AND REGULATORY CHANGES OR ACTIONS MAY ADVERSELY AFFECT THE USAGE, TRANSFERABILITY, TRANSACTABILITY AND ACCESSIBILITY RELATED TO PRYSM.
|
||||
Some Internet plans will charge an additional amount for any excess upload bandwidth used that isn’t included in the plan and may terminate your connection without warning because of overuse. We advise that you check whether your Internet connection is subjected to such limitations and monitor your bandwidth use so that you can stop Prysm before you reach your upload limit.
|
||||
|
||||
We make no claims that Prysm is appropriate or permitted for use in any specific jurisdiction. Access to Prysm may not be legal by certain persons or in certain jurisdictions or countries. If you access Prysm, you do so on your own initiative and are responsible for compliance with all Applicable Law (as defined below), including, without limitation, for the avoidance of doubt, local laws.
|
||||
### Warranty Disclaimer
|
||||
PRYSM IS PROVIDED ON AN “AS-IS” BASIS AND MAY INCLUDE ERRORS, OMISSIONS, OR OTHER INACCURACIES. PRYSMATIC LABS AND ITS CONTRIBUTORS MAKE NO REPRESENTATIONS OR WARRANTIES ABOUT PRYSM FOR ANY PURPOSE, AND HEREBY EXPRESSLY DISCLAIM ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT OR ANY OTHER IMPLIED WARRANTY UNDER THE UNIFORM COMPUTER INFORMATION TRANSACTIONS ACT AS ENACTED BY ANY STATE. WE ALSO MAKE NO REPRESENTATIONS OR WARRANTIES THAT PRYSM WILL OPERATE ERROR-FREE, UNINTERRUPTED, OR IN A MANNER THAT WILL MEET YOUR REQUIREMENTS AND/OR NEEDS. THEREFORE, YOU ASSUME THE ENTIRE RISK REGARDING THE QUALITY AND/OR PERFORMANCE OF PRYSM AND ANY TRANSACTIONS ENTERED INTO THEREON.
|
||||
|
||||
Some Internet plans will charge additional amounts for bandwidth or any excess upload bandwidth used that isn’t included in the plan and may terminate your connection without warning because of overuse. We advise that you check whether your Internet connection is subjected to any such limitations and monitor your bandwidth use and upload volumes.
|
||||
### Limitation of Liability
|
||||
In no event will Prysmatic Labs or any of its contributors be liable, whether in contract, warranty, tort (including negligence, whether active, passive or imputed), product liability, strict liability or other theory, breach of statutory duty or otherwise arising out of, or in connection with, your use of Prysm, for any direct, indirect, incidental, special or consequential damages (including any loss of profits or data, business interruption or other pecuniary loss, or damage, loss or other compromise of data, in each case whether direct, indirect, incidental, special or consequential) arising out of use Prysm, even if we or other users have been advised of the possibility of such damages. The foregoing limitations and disclaimers shall apply to the maximum extent permitted by applicable law, even if any remedy fails of its essential purpose. You acknowledge and agree that the limitations of liability afforded us hereunder constitute a material and actual inducement and condition to entering into these Terms, and are reasonable, fair and equitable in scope to protect our legitimate interests in light of the fact that we are not receiving consideration from you for providing Prysm.
|
||||
|
||||
## Warranty Disclaimer
|
||||
### Indemnification
|
||||
To the maximum extent permitted by law, you will defend, indemnify and hold Prysmatic Labs and its contributors harmless from and against any and all claims, actions, suits, investigations, or proceedings by any third party (including any party or purported party to or beneficiary or purported beneficiary of any transaction on Prysm), as well as any and all losses, liabilities,
|
||||
damages, costs, and expenses (including reasonable attorneys’ fees) arising out of, accruing from, or in any way related to (i) your breach of the terms of this Agreement, (ii) any transaction, or the failure to occur of any transaction on Prysm, and (iii) your negligence, fraud, or willful misconduct.
|
||||
|
||||
PRYSM IS PROVIDED ON AN “AS-IS” BASIS AND MAY INCLUDE ERRORS, OMISSIONS, OR OTHER INACCURACIES. WITHOUT LIMITING ANYTHING SET FORTH ELSEWHERE IN THESE TERMS, OFFCHAIN LABS AND ITS CONTRIBUTORS MAKE NO REPRESENTATIONS OR WARRANTIES ABOUT PRYSM FOR ANY PURPOSE, AND HEREBY EXPRESSLY DISCLAIM ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT OR ANY OTHER IMPLIED WARRANTY UNDER THE UNIFORM COMPUTER INFORMATION TRANSACTIONS ACT AS ENACTED BY ANY STATE OR OTHER GOVERNMENTAL AUTHORITY. WE ALSO MAKE NO REPRESENTATIONS OR WARRANTIES THAT PRYSM WILL OPERATE ERROR-FREE, UNINTERRUPTED, OR IN A MANNER THAT WILL MEET YOUR REQUIREMENTS AND/OR NEEDS. THEREFORE, YOU ASSUME THE ENTIRE RISK REGARDING THE QUALITY AND/OR PERFORMANCE OF PRYSM AND ANY TRANSACTIONS ENTERED INTO THEREON.
|
||||
### Compliance with Laws and Tax Obligations
|
||||
Your use of Prysm is subject to all applicable laws of any governmental authority, including, without limitation, federal, state and foreign securities laws, tax laws, tariff and trade laws, ordinances, judgments, decrees, injunctions, writs and orders or like actions of any governmental authority and rules, regulations, orders, interpretations, licenses, and permits of any federal,
|
||||
regional, state, county, municipal or other governmental authority and you agree to comply with all such laws in your use of Prysm. The users of Prysm are solely responsible to determinate what, if any, taxes apply to their ETH transactions. The owners of, or contributors to, Prysm are not responsible for determining the taxes that apply to ETH transactions.
|
||||
|
||||
## Limitation of Liability
|
||||
### Miscellaneous
|
||||
These Terms will be construed and enforced in accordance with the laws of the state of Illinois as applied to agreements entered into and completely performed in Illinois. You agree to the personal jurisdiction by and venue in Illinois and waive any objection to such jurisdiction or venue.
|
||||
|
||||
IN NO EVENT WILL OFFCHAIN LABS OR ANY OF ITS AFFILIATES OR ITS OR ANY SUCH AFFILIATE’S DIRECTORS, OFFICERS, EMPLOYEES, AGENTS, OR REPRESENTATIVES OR ANY CONTRIBUTORS (COLLECTIVELY, THE “OCL PARTIES”) BE LIABLE, WHETHER IN CONTRACT, WARRANTY, TORT (INCLUDING NEGLIGENCE, WHETHER ACTIVE, PASSIVE OR IMPUTED), PRODUCT LIABILITY, STRICT LIABILITY OR OTHER THEORY, BREACH OF STATUTORY DUTY OR OTHERWISE ARISING OUT OF, OR IN CONNECTION WITH, YOUR USE OF PRYSM, FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL OR CONSEQUENTIAL DAMAGES (INCLUDING ANY LOSS OF PROFITS OR DATA, BUSINESS INTERRUPTION OR OTHER PECUNIARY LOSS, OR DAMAGE, LOSS OR OTHER COMPROMISE OF DATA, IN EACH CASE WHETHER DIRECT, INDIRECT, INCIDENTAL, SPECIAL OR CONSEQUENTIAL) ARISING OUT OF USE PRYSM, EVEN IF WE OR OTHER USERS HAVE BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. The foregoing limitations and disclaimers shall apply to the maximum extent permitted by Applicable Law, even if any remedy fails of its essential purpose. You acknowledge and agree that the limitations of liability afforded us hereunder constitute a material and actual inducement and condition to entering into these Terms, and are reasonable, fair and equitable in scope to protect our legitimate interests in light of the fact that we are not receiving consideration from you for providing Prysm.
|
||||
We reserve the right to revise these Terms, and your rights and obligations are at all times subject to the then-current Terms provided on Prysm. Your continued use of Prysm constitutes acceptance of such revised Terms.
|
||||
|
||||
## Indemnification
|
||||
|
||||
To the maximum extent permitted by Applicable Law, you will defend, indemnify and hold each OCL Party harmless from and against any and all claims, actions, suits, investigations, or proceedings by any third party (including any party or purported party to or beneficiary or purported beneficiary of any transaction or other activity on Prysm), as well as any and all losses, liabilities, damages, costs, and expenses (including reasonable attorneys’ fees and costs) arising out of, accruing from, or in any way related to (i) your breach of the terms of this Agreement, (ii) any transaction, or the failure to occur of any transaction on Prysm, and (iii) your negligence, fraud, or willful misconduct.
|
||||
|
||||
## Compliance with Laws
|
||||
|
||||
Your use of Prysm is subject to all applicable laws of any governmental authority, including, without limitation, federal, state and foreign securities laws, tax laws, tariff and trade laws, ordinances, judgments, decrees, injunctions, writs and orders or like actions of any governmental authority and rules, regulations, orders, interpretations, licenses, and permits of any federal, regional, state, county, municipal or other governmental authority (collectively, “Applicable Law”) and you agree to comply with all such Applicable Law in your use of Prysm. The users of Prysm are solely responsible to determine what, if any, taxes apply to their ETH transactions. The owners of, or contributors to, Prysm are not responsible for determining the taxes that apply to ETH transactions.
|
||||
|
||||
## Miscellaneous
|
||||
|
||||
These Terms will be governed by the laws of the State of Delaware without regard to its conflict of law provisions. With respect to any disputes or claims not subject to arbitration, as set forth in the OCL Terms of Use, you and Offchain Labs submit to the personal and exclusive jurisdiction of the state and federal courts located within New York, New York and waive any objection to such jurisdiction and venue. The failure of Offchain Labs to exercise or enforce any right or provision of these Terms will not constitute a waiver of such right or provision.
|
||||
We reserve the right to revise these Terms, and your rights and obligations are at all times subject to the then-current Terms provided on Prysm. Your use of Prysm following any such revision to these Terms constitutes acceptance of such revised Terms.
|
||||
|
||||
These Terms constitute the entire agreement between you and Offchain Labs regarding use of Prysm and will supersede all prior agreements, whether written or oral. No usage of trade or other regular practice or method of dealing between the parties will be used to modify, interpret, supplement, or alter the terms of these Terms.
|
||||
|
||||
If any portion of these Terms is held invalid or unenforceable, such invalidity or enforceability will not affect the other provisions of these Terms, which will remain in full force and effect, and the invalid or unenforceable portion will be given effect to the greatest extent possible. The failure of a party to require performance of any provision will not affect that party’s right to require performance at any time thereafter, nor will a waiver of any breach or default of these Terms or any provision of these Terms constitute a waiver of any subsequent breach or default or a waiver of the provision itself.
|
||||
These Terms constitute the entire agreement between you and Prysmatic Labs regarding use of Prysm and will supersede all prior agreements whether, written or oral. No usage of trade or other regular practice or method of dealing between the parties will be used to modify, interpret, supplement, or alter the terms of these Terms.
|
||||
|
||||
If any portion of these Terms is held invalid or unenforceable, such invalidity or enforceability will not affect the other provisions of these Terms, which will remain in full force and effect, and the invalid or unenforceable portion will be given effect to the greatest extent possible. The failure of a party to require performance of any provision will not affect that party’s right to require performance at any time thereafter, nor will a waiver of any breach or default of these Terms or any provision of these Terms constitute a waiver of any subsequent breach or default or a waiver of the provision itself.
|
||||
380
WORKSPACE
380
WORKSPACE
@@ -4,82 +4,47 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
|
||||
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
|
||||
|
||||
http_archive(
|
||||
name = "rules_pkg",
|
||||
sha256 = "8c20f74bca25d2d442b327ae26768c02cf3c99e93fad0381f32be9aab1967675",
|
||||
name = "bazel_toolchains",
|
||||
sha256 = "8e0633dfb59f704594f19ae996a35650747adc621ada5e8b9fb588f808c89cb0",
|
||||
strip_prefix = "bazel-toolchains-3.7.0",
|
||||
urls = [
|
||||
"https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.8.1/rules_pkg-0.8.1.tar.gz",
|
||||
"https://github.com/bazelbuild/rules_pkg/releases/download/0.8.1/rules_pkg-0.8.1.tar.gz",
|
||||
"https://mirror.bazel.build/github.com/bazelbuild/bazel-toolchains/releases/download/3.7.0/bazel-toolchains-3.7.0.tar.gz",
|
||||
"https://github.com/bazelbuild/bazel-toolchains/releases/download/3.7.0/bazel-toolchains-3.7.0.tar.gz",
|
||||
],
|
||||
)
|
||||
|
||||
load("@rules_pkg//:deps.bzl", "rules_pkg_dependencies")
|
||||
|
||||
rules_pkg_dependencies()
|
||||
|
||||
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
|
||||
|
||||
http_archive(
|
||||
name = "toolchains_protoc",
|
||||
sha256 = "abb1540f8a9e045422730670ebb2f25b41fa56ca5a7cf795175a110a0a68f4ad",
|
||||
strip_prefix = "toolchains_protoc-0.3.6",
|
||||
url = "https://github.com/aspect-build/toolchains_protoc/releases/download/v0.3.6/toolchains_protoc-v0.3.6.tar.gz",
|
||||
name = "com_grail_bazel_toolchain",
|
||||
sha256 = "040b9d00b8a03e8a28e38159ad0f2d0e0de625d93f453a9f226971a8c47e757b",
|
||||
strip_prefix = "bazel-toolchain-5f82830f9d6a1941c3eb29683c1864ccf2862454",
|
||||
urls = ["https://github.com/grailbio/bazel-toolchain/archive/5f82830f9d6a1941c3eb29683c1864ccf2862454.tar.gz"],
|
||||
)
|
||||
|
||||
load("@toolchains_protoc//protoc:repositories.bzl", "rules_protoc_dependencies")
|
||||
load("@com_grail_bazel_toolchain//toolchain:deps.bzl", "bazel_toolchain_dependencies")
|
||||
|
||||
rules_protoc_dependencies()
|
||||
bazel_toolchain_dependencies()
|
||||
|
||||
load("@rules_proto//proto:repositories.bzl", "rules_proto_dependencies")
|
||||
load("@com_grail_bazel_toolchain//toolchain:rules.bzl", "llvm_toolchain")
|
||||
|
||||
rules_proto_dependencies()
|
||||
|
||||
load("@bazel_features//:deps.bzl", "bazel_features_deps")
|
||||
|
||||
bazel_features_deps()
|
||||
|
||||
load("@toolchains_protoc//protoc:toolchain.bzl", "protoc_toolchains")
|
||||
|
||||
protoc_toolchains(
|
||||
name = "protoc_toolchains",
|
||||
version = "v25.3",
|
||||
llvm_toolchain(
|
||||
name = "llvm_toolchain",
|
||||
llvm_version = "10.0.0",
|
||||
)
|
||||
|
||||
HERMETIC_CC_TOOLCHAIN_VERSION = "v3.0.1"
|
||||
load("@llvm_toolchain//:toolchains.bzl", "llvm_register_toolchains")
|
||||
|
||||
http_archive(
|
||||
name = "hermetic_cc_toolchain",
|
||||
sha256 = "3bc6ec127622fdceb4129cb06b6f7ab098c4d539124dde96a6318e7c32a53f7a",
|
||||
urls = [
|
||||
"https://mirror.bazel.build/github.com/uber/hermetic_cc_toolchain/releases/download/{0}/hermetic_cc_toolchain-{0}.tar.gz".format(HERMETIC_CC_TOOLCHAIN_VERSION),
|
||||
"https://github.com/uber/hermetic_cc_toolchain/releases/download/{0}/hermetic_cc_toolchain-{0}.tar.gz".format(HERMETIC_CC_TOOLCHAIN_VERSION),
|
||||
],
|
||||
)
|
||||
|
||||
load("@hermetic_cc_toolchain//toolchain:defs.bzl", zig_toolchains = "toolchains")
|
||||
|
||||
zig_toolchains()
|
||||
|
||||
# Register zig sdk toolchains with support for Ubuntu 20.04 (Focal Fossa) which has an EOL date of April, 2025.
|
||||
# For ubuntu glibc support, see https://launchpad.net/ubuntu/+source/glibc
|
||||
register_toolchains(
|
||||
"@zig_sdk//toolchain:linux_amd64_gnu.2.31",
|
||||
"@zig_sdk//toolchain:linux_arm64_gnu.2.31",
|
||||
# Hermetic cc toolchain is not yet supported on darwin. Sysroot needs to be provided.
|
||||
# See https://github.com/uber/hermetic_cc_toolchain#osx-sysroot
|
||||
# "@zig_sdk//toolchain:darwin_amd64",
|
||||
# "@zig_sdk//toolchain:darwin_arm64",
|
||||
# Windows builds are not supported yet.
|
||||
# "@zig_sdk//toolchain:windows_amd64",
|
||||
)
|
||||
|
||||
load("@prysm//tools/cross-toolchain:darwin_cc_hack.bzl", "configure_nonhermetic_darwin")
|
||||
|
||||
configure_nonhermetic_darwin()
|
||||
llvm_register_toolchains()
|
||||
|
||||
load("@prysm//tools/cross-toolchain:prysm_toolchains.bzl", "configure_prysm_toolchains")
|
||||
|
||||
configure_prysm_toolchains()
|
||||
|
||||
load("@prysm//tools/cross-toolchain:rbe_toolchains_config.bzl", "rbe_toolchains_config")
|
||||
|
||||
rbe_toolchains_config()
|
||||
|
||||
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
|
||||
|
||||
http_archive(
|
||||
name = "bazel_skylib",
|
||||
sha256 = "1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c",
|
||||
@@ -95,10 +60,10 @@ bazel_skylib_workspace()
|
||||
|
||||
http_archive(
|
||||
name = "bazel_gazelle",
|
||||
integrity = "sha256-MpOL2hbmcABjA1R5Bj2dJMYO2o15/Uc5Vj9Q0zHLMgk=",
|
||||
sha256 = "de69a09dc70417580aabf20a28619bb3ef60d038470c7cf8442fafcf627c21cb",
|
||||
urls = [
|
||||
"https://mirror.bazel.build/github.com/bazelbuild/bazel-gazelle/releases/download/v0.35.0/bazel-gazelle-v0.35.0.tar.gz",
|
||||
"https://github.com/bazelbuild/bazel-gazelle/releases/download/v0.35.0/bazel-gazelle-v0.35.0.tar.gz",
|
||||
"https://mirror.bazel.build/github.com/bazelbuild/bazel-gazelle/releases/download/v0.24.0/bazel-gazelle-v0.24.0.tar.gz",
|
||||
"https://github.com/bazelbuild/bazel-gazelle/releases/download/v0.24.0/bazel-gazelle-v0.24.0.tar.gz",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -110,65 +75,23 @@ http_archive(
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "rules_distroless",
|
||||
sha256 = "e64f06e452cd153aeab81f752ccf4642955b3af319e64f7bc7a7c9252f76b10e",
|
||||
strip_prefix = "rules_distroless-f5e678217b57ce3ad2f1c0204bd4e9d416255773",
|
||||
url = "https://github.com/GoogleContainerTools/rules_distroless/archive/f5e678217b57ce3ad2f1c0204bd4e9d416255773.tar.gz",
|
||||
)
|
||||
|
||||
load("@rules_distroless//distroless:dependencies.bzl", "rules_distroless_dependencies")
|
||||
|
||||
rules_distroless_dependencies()
|
||||
|
||||
http_archive(
|
||||
name = "distroless",
|
||||
integrity = "sha256-Cf00kUp1NyXA3LzbdyYy4Kda27wbkB8+A9MliTxq4jE=",
|
||||
strip_prefix = "distroless-9dc924b9fe812eec2fa0061824dcad39eb09d0d6",
|
||||
url = "https://github.com/GoogleContainerTools/distroless/archive/9dc924b9fe812eec2fa0061824dcad39eb09d0d6.tar.gz", # 2024-01-24
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "aspect_bazel_lib",
|
||||
sha256 = "a272d79bb0ac6b6965aa199b1f84333413452e87f043b53eca7f347a23a478e8",
|
||||
strip_prefix = "bazel-lib-2.9.3",
|
||||
url = "https://github.com/bazel-contrib/bazel-lib/releases/download/v2.9.3/bazel-lib-v2.9.3.tar.gz",
|
||||
)
|
||||
|
||||
load("@aspect_bazel_lib//lib:repositories.bzl", "aspect_bazel_lib_dependencies", "aspect_bazel_lib_register_toolchains")
|
||||
|
||||
aspect_bazel_lib_dependencies()
|
||||
|
||||
aspect_bazel_lib_register_toolchains()
|
||||
|
||||
http_archive(
|
||||
name = "rules_oci",
|
||||
sha256 = "4a276e9566c03491649eef63f27c2816cc222f41ccdebd97d2c5159e84917c3b",
|
||||
strip_prefix = "rules_oci-1.7.4",
|
||||
url = "https://github.com/bazel-contrib/rules_oci/releases/download/v1.7.4/rules_oci-v1.7.4.tar.gz",
|
||||
)
|
||||
|
||||
load("@rules_oci//oci:dependencies.bzl", "rules_oci_dependencies")
|
||||
|
||||
rules_oci_dependencies()
|
||||
|
||||
load("@rules_oci//oci:repositories.bzl", "LATEST_CRANE_VERSION", "oci_register_toolchains")
|
||||
|
||||
oci_register_toolchains(
|
||||
name = "oci",
|
||||
crane_version = LATEST_CRANE_VERSION,
|
||||
name = "io_bazel_rules_docker",
|
||||
sha256 = "1f4e59843b61981a96835dc4ac377ad4da9f8c334ebe5e0bb3f58f80c09735f4",
|
||||
strip_prefix = "rules_docker-0.19.0",
|
||||
urls = ["https://github.com/bazelbuild/rules_docker/releases/download/v0.19.0/rules_docker-v0.19.0.tar.gz"],
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "io_bazel_rules_go",
|
||||
integrity = "sha256-JD8o94crTb2DFiJJR8nMAGdBAW95zIENB4cbI+JnrI4=",
|
||||
patch_args = ["-p1"],
|
||||
patches = [
|
||||
# Expose internals of go_test for custom build transitions.
|
||||
"//third_party:io_bazel_rules_go_test.patch",
|
||||
],
|
||||
strip_prefix = "rules_go-cf3c3af34bd869b864f5f2b98e2f41c2b220d6c9",
|
||||
sha256 = "2b1641428dff9018f9e85c0384f03ec6c10660d935b750e3fa1492a281a53b0f",
|
||||
urls = [
|
||||
"https://github.com/bazel-contrib/rules_go/archive/cf3c3af34bd869b864f5f2b98e2f41c2b220d6c9.tar.gz",
|
||||
"https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v0.29.0/rules_go-v0.29.0.zip",
|
||||
"https://github.com/bazelbuild/rules_go/releases/download/v0.29.0/rules_go-v0.29.0.zip",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -187,36 +110,88 @@ git_repository(
|
||||
# gazelle args: -go_prefix github.com/gogo/protobuf -proto legacy
|
||||
)
|
||||
|
||||
load("@rules_oci//oci:pull.bzl", "oci_pull")
|
||||
|
||||
# A multi-arch base image
|
||||
oci_pull(
|
||||
name = "linux_debian11_multiarch_base", # Debian bullseye
|
||||
digest = "sha256:b82f113425c5b5c714151aaacd8039bc141821cdcd3c65202d42bdf9c43ae60b", # 2023-12-12
|
||||
image = "gcr.io/prysmaticlabs/distroless/cc-debian11",
|
||||
platforms = [
|
||||
"linux/amd64",
|
||||
"linux/arm64/v8",
|
||||
],
|
||||
reproducible = True,
|
||||
http_archive(
|
||||
name = "fuzzit_linux",
|
||||
build_file_content = "exports_files([\"fuzzit\"])",
|
||||
sha256 = "9ca76ac1c22d9360936006efddf992977ebf8e4788ded8e5f9d511285c9ac774",
|
||||
urls = ["https://github.com/fuzzitdev/fuzzit/releases/download/v2.4.76/fuzzit_Linux_x86_64.zip"],
|
||||
)
|
||||
|
||||
load("@prysm//tools:image_deps.bzl", "prysm_image_deps")
|
||||
load(
|
||||
"@io_bazel_rules_docker//repositories:repositories.bzl",
|
||||
container_repositories = "repositories",
|
||||
)
|
||||
|
||||
prysm_image_deps()
|
||||
container_repositories()
|
||||
|
||||
load(
|
||||
"@io_bazel_rules_docker//container:container.bzl",
|
||||
"container_pull",
|
||||
)
|
||||
|
||||
container_pull(
|
||||
name = "cc_image_base",
|
||||
digest = "sha256:2c4bb6b7236db0a55ec54ba8845e4031f5db2be957ac61867872bf42e56c4deb",
|
||||
registry = "gcr.io",
|
||||
repository = "distroless/cc",
|
||||
)
|
||||
|
||||
container_pull(
|
||||
name = "cc_debug_image_base",
|
||||
digest = "sha256:3680c61e81f68fc00bfb5e1ec65e8e678aaafa7c5f056bc2681c29527ebbb30c",
|
||||
registry = "gcr.io",
|
||||
repository = "distroless/cc",
|
||||
)
|
||||
|
||||
container_pull(
|
||||
name = "go_image_base",
|
||||
digest = "sha256:ba7a315f86771332e76fa9c3d423ecfdbb8265879c6f1c264d6fff7d4fa460a4",
|
||||
registry = "gcr.io",
|
||||
repository = "distroless/base",
|
||||
)
|
||||
|
||||
container_pull(
|
||||
name = "go_debug_image_base",
|
||||
digest = "sha256:efd8711717d9e9b5d0dbb20ea10876dab0609c923bc05321b912f9239090ca80",
|
||||
registry = "gcr.io",
|
||||
repository = "distroless/base",
|
||||
)
|
||||
|
||||
container_pull(
|
||||
name = "alpine_cc_linux_amd64",
|
||||
digest = "sha256:752aa0c9a88461ffc50c5267bb7497ef03a303e38b2c8f7f2ded9bebe5f1f00e",
|
||||
registry = "index.docker.io",
|
||||
repository = "pinglamb/alpine-glibc",
|
||||
)
|
||||
|
||||
container_pull(
|
||||
name = "fuzzit_base",
|
||||
digest = "sha256:24a39a4360b07b8f0121eb55674a2e757ab09f0baff5569332fefd227ee4338f",
|
||||
registry = "gcr.io",
|
||||
repository = "fuzzit-public/stretch-llvm8",
|
||||
)
|
||||
|
||||
load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies")
|
||||
|
||||
go_rules_dependencies()
|
||||
|
||||
go_register_toolchains(
|
||||
go_version = "1.24.0",
|
||||
go_version = "1.17.6",
|
||||
nogo = "@//:nogo",
|
||||
)
|
||||
|
||||
load("//:distroless_deps.bzl", "distroless_deps")
|
||||
|
||||
distroless_deps()
|
||||
http_archive(
|
||||
name = "prysm_testnet_site",
|
||||
build_file_content = """
|
||||
proto_library(
|
||||
name = "faucet_proto",
|
||||
srcs = ["src/proto/faucet.proto"],
|
||||
visibility = ["//visibility:public"],
|
||||
)""",
|
||||
sha256 = "29742136ff9faf47343073c4569a7cf21b8ed138f726929e09e3c38ab83544f7",
|
||||
strip_prefix = "prysm-testnet-site-5c711600f0a77fc553b18cf37b880eaffef4afdb",
|
||||
url = "https://github.com/prestonvanloon/prysm-testnet-site/archive/5c711600f0a77fc553b18cf37b880eaffef4afdb.tar.gz",
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "io_kubernetes_build",
|
||||
@@ -236,26 +211,11 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "516d551cfb3e50e4ac2f42db0992f4ceb573a7cb1616d727a725c8161485329f",
|
||||
url = "https://github.com/eth-clients/slashing-protection-interchange-tests/archive/refs/tags/v5.3.0.tar.gz",
|
||||
sha256 = "91434d5fd5e1c6eb7b0174fed2afe25e09bddf00e1e4c431db931b2cee4e7773",
|
||||
url = "https://github.com/eth-clients/slashing-protection-interchange-tests/archive/b8413ca42dc92308019d0d4db52c87e9e125c4e9.tar.gz",
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "eip4881_spec_tests",
|
||||
build_file_content = """
|
||||
filegroup(
|
||||
name = "test_data",
|
||||
srcs = glob([
|
||||
"**/*.yaml",
|
||||
]),
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "89cb659498c0d196fc9f957f8b849b2e1a5c041c3b2b3ae5432ac5c26944297e",
|
||||
url = "https://github.com/ethereum/EIPs/archive/5480440fe51742ed23342b68cf106cefd427e39d.tar.gz",
|
||||
)
|
||||
|
||||
consensus_spec_version = "v1.5.0-beta.4"
|
||||
consensus_spec_version = "v1.1.10"
|
||||
|
||||
bls_test_version = "v0.1.1"
|
||||
|
||||
@@ -271,7 +231,7 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
integrity = "sha256-QG0NUqaCvP5lKaKKwF/fmeICZVjONMlb7EE+MtYl0C0=",
|
||||
sha256 = "28043009cc2f6fc9804e73c8c1fc2cb27062f1591e6884f3015ae1dd7a276883",
|
||||
url = "https://github.com/ethereum/consensus-spec-tests/releases/download/%s/general.tar.gz" % consensus_spec_version,
|
||||
)
|
||||
|
||||
@@ -287,7 +247,7 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
integrity = "sha256-8NQngTSSqzW/j3tOUi3r5h+94ChRbLNWTt7BOGqr4+E=",
|
||||
sha256 = "bc1a283ca068f310f04d70c4f6a8eaa0b8f7e9318073a8bdc2ee233111b4e339",
|
||||
url = "https://github.com/ethereum/consensus-spec-tests/releases/download/%s/minimal.tar.gz" % consensus_spec_version,
|
||||
)
|
||||
|
||||
@@ -303,7 +263,7 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
integrity = "sha256-gFqxbaBnJ7dtdoj0zFbVrtlHv/bLNuWjrTHkyCAjFjI=",
|
||||
sha256 = "bbabb482c229ff9d4e2c7b77c992edb452f9d0af7c6d8dd4f922f06a7b101e81",
|
||||
url = "https://github.com/ethereum/consensus-spec-tests/releases/download/%s/mainnet.tar.gz" % consensus_spec_version,
|
||||
)
|
||||
|
||||
@@ -318,7 +278,7 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
integrity = "sha256-9paalF0POULpP2ga+4ouHSETKYrWNCUCZoJHPuFw06E=",
|
||||
sha256 = "408a5524548ad3fcf387f65ac7ec52781d9ee899499720bb12451b48a15818d4",
|
||||
strip_prefix = "consensus-specs-" + consensus_spec_version[1:],
|
||||
url = "https://github.com/ethereum/consensus-specs/archive/refs/tags/%s.tar.gz" % consensus_spec_version,
|
||||
)
|
||||
@@ -349,83 +309,43 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "77e7e3ed65e33b7bb19d30131f4c2bb39e4dfeb188ab9ae84651c3cc7600131d",
|
||||
strip_prefix = "eth2-networks-934c948e69205dcf2deb87e4ae6cc140c335f94d",
|
||||
url = "https://github.com/eth-clients/eth2-networks/archive/934c948e69205dcf2deb87e4ae6cc140c335f94d.tar.gz",
|
||||
sha256 = "4e8a18b21d056c4032605621b1a6632198eabab57cb90c61e273f344c287f1b2",
|
||||
strip_prefix = "eth2-networks-791a5369c5981e829698b17fbcdcdacbdaba97c8",
|
||||
url = "https://github.com/eth-clients/eth2-networks/archive/791a5369c5981e829698b17fbcdcdacbdaba97c8.tar.gz",
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "holesky_testnet",
|
||||
build_file_content = """
|
||||
filegroup(
|
||||
name = "configs",
|
||||
srcs = [
|
||||
"metadata/config.yaml",
|
||||
],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
integrity = "sha256-YVFFrCmjoGZ3fXMWpsCpSsYbANy1grnqYwOLKIg2SsA=",
|
||||
strip_prefix = "holesky-32a72e21c6e53c262f27d50dd540cb654517d03a",
|
||||
url = "https://github.com/eth-clients/holesky/archive/32a72e21c6e53c262f27d50dd540cb654517d03a.tar.gz", # 2025-03-17
|
||||
name = "com_github_bazelbuild_buildtools",
|
||||
sha256 = "7a182df18df1debabd9e36ae07c8edfa1378b8424a04561b674d933b965372b3",
|
||||
strip_prefix = "buildtools-f2aed9ee205d62d45c55cfabbfd26342f8526862",
|
||||
url = "https://github.com/bazelbuild/buildtools/archive/f2aed9ee205d62d45c55cfabbfd26342f8526862.zip",
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "sepolia_testnet",
|
||||
build_file_content = """
|
||||
filegroup(
|
||||
name = "configs",
|
||||
srcs = [
|
||||
"metadata/config.yaml",
|
||||
],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
integrity = "sha256-b5F7Wg9LLMqGRIpP2uqb/YsSFVn2ynzlV7g/Nb1EFLk=",
|
||||
strip_prefix = "sepolia-562d9938f08675e9ba490a1dfba21fb05843f39f",
|
||||
url = "https://github.com/eth-clients/sepolia/archive/562d9938f08675e9ba490a1dfba21fb05843f39f.tar.gz", # 2025-03-17
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "hoodi_testnet",
|
||||
build_file_content = """
|
||||
filegroup(
|
||||
name = "configs",
|
||||
srcs = [
|
||||
"metadata/config.yaml",
|
||||
],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
integrity = "sha256-dPiEWUd8QvbYGwGtIm0QtCekitVLOLsW5rpQIGzz8PU=",
|
||||
strip_prefix = "hoodi-828c2c940e1141092bd4bb979cef547ea926d272",
|
||||
url = "https://github.com/eth-clients/hoodi/archive/828c2c940e1141092bd4bb979cef547ea926d272.tar.gz",
|
||||
)
|
||||
|
||||
http_archive(
|
||||
git_repository(
|
||||
name = "com_google_protobuf",
|
||||
sha256 = "9bd87b8280ef720d3240514f884e56a712f2218f0d693b48050c836028940a42",
|
||||
strip_prefix = "protobuf-25.1",
|
||||
urls = [
|
||||
"https://github.com/protocolbuffers/protobuf/archive/v25.1.tar.gz",
|
||||
],
|
||||
commit = "436bd7880e458532901c58f4d9d1ea23fa7edd52",
|
||||
remote = "https://github.com/protocolbuffers/protobuf",
|
||||
shallow_since = "1617835118 -0700",
|
||||
)
|
||||
|
||||
# Group the sources of the library so that CMake rule have access to it
|
||||
all_content = """filegroup(name = "all", srcs = glob(["**"]), visibility = ["//visibility:public"])"""
|
||||
|
||||
# External dependencies
|
||||
|
||||
http_archive(
|
||||
name = "googleapis",
|
||||
sha256 = "9d1a930e767c93c825398b8f8692eca3fe353b9aaadedfbcf1fca2282c85df88",
|
||||
strip_prefix = "googleapis-64926d52febbf298cb82a8f472ade4a3969ba922",
|
||||
urls = [
|
||||
"https://github.com/googleapis/googleapis/archive/64926d52febbf298cb82a8f472ade4a3969ba922.zip",
|
||||
],
|
||||
name = "prysm_web_ui",
|
||||
build_file_content = """
|
||||
filegroup(
|
||||
name = "site",
|
||||
srcs = glob(["**/*"]),
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
load("@googleapis//:repository_rules.bzl", "switched_rules_by_language")
|
||||
|
||||
switched_rules_by_language(
|
||||
name = "com_google_googleapis_imports",
|
||||
go = True,
|
||||
""",
|
||||
sha256 = "4797a7e594a5b1f4c1c8080701613f3ee451b01ec0861499ea7d9b60877a6b23",
|
||||
urls = [
|
||||
"https://github.com/prysmaticlabs/prysm-web-ui/releases/download/v1.0.3/prysm-web-ui.tar.gz",
|
||||
],
|
||||
)
|
||||
|
||||
load("//:deps.bzl", "prysm_deps")
|
||||
@@ -441,13 +361,39 @@ load("@prysm//testing/endtoend:deps.bzl", "e2e_deps")
|
||||
|
||||
e2e_deps()
|
||||
|
||||
load(
|
||||
"@io_bazel_rules_docker//go:image.bzl",
|
||||
_go_image_repos = "repositories",
|
||||
)
|
||||
|
||||
# Golang images
|
||||
# This is using gcr.io/distroless/base
|
||||
_go_image_repos()
|
||||
|
||||
# CC images
|
||||
# This is using gcr.io/distroless/base
|
||||
load(
|
||||
"@io_bazel_rules_docker//cc:image.bzl",
|
||||
_cc_image_repos = "repositories",
|
||||
)
|
||||
|
||||
_cc_image_repos()
|
||||
|
||||
load("@io_bazel_rules_go//extras:embed_data_deps.bzl", "go_embed_data_dependencies")
|
||||
|
||||
go_embed_data_dependencies()
|
||||
|
||||
load("@com_github_atlassian_bazel_tools//gometalinter:deps.bzl", "gometalinter_dependencies")
|
||||
|
||||
gometalinter_dependencies()
|
||||
|
||||
load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies")
|
||||
|
||||
gazelle_dependencies(go_sdk = "go_sdk")
|
||||
gazelle_dependencies()
|
||||
|
||||
load("@com_github_bazelbuild_buildtools//buildifier:deps.bzl", "buildifier_dependencies")
|
||||
|
||||
buildifier_dependencies()
|
||||
|
||||
load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
@@ -1,24 +0,0 @@
load("@prysm//tools/go:def.bzl", "go_library", "go_test")

go_library(
    name = "go_default_library",
    srcs = [
        "constants.go",
        "headers.go",
        "jwt.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/v5/api",
    visibility = ["//visibility:public"],
    deps = [
        "//crypto/rand:go_default_library",
        "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
        "@com_github_pkg_errors//:go_default_library",
    ],
)

go_test(
    name = "go_default_test",
    srcs = ["jwt_test.go"],
    embed = [":go_default_library"],
    deps = ["//testing/require:go_default_library"],
)
@@ -1,9 +0,0 @@
load("@prysm//tools/go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = ["common.go"],
    importpath = "github.com/prysmaticlabs/prysm/v5/api/client/apiutil",
    visibility = ["//visibility:public"],
    deps = ["//consensus-types/primitives:go_default_library"],
)
@@ -1,30 +0,0 @@
package apiutil

import (
	"fmt"
	neturl "net/url"
	"regexp"
	"strconv"

	"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
)

func ValidRoot(root string) bool {
	matchesRegex, err := regexp.MatchString("^0x[a-fA-F0-9]{64}$", root)
	if err != nil {
		return false
	}
	return matchesRegex
}

func Uint64ToString[T uint64 | primitives.Slot | primitives.ValidatorIndex | primitives.CommitteeIndex | primitives.Epoch](val T) string {
	return strconv.FormatUint(uint64(val), 10)
}

func BuildURL(path string, queryParams ...neturl.Values) string {
	if len(queryParams) == 0 {
		return path
	}

	return fmt.Sprintf("%s?%s", path, queryParams[0].Encode())
}
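For orientation only (this is not part of the diff): a minimal sketch of how the removed apiutil helpers above are typically combined. The endpoint path and slot value are illustrative assumptions, not taken from the repository.

package main

import (
	"fmt"
	neturl "net/url"

	"github.com/prysmaticlabs/prysm/v5/api/client/apiutil"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
)

func main() {
	// Format a slot as a decimal string and attach it as a query parameter.
	params := neturl.Values{}
	params.Set("slot", apiutil.Uint64ToString(primitives.Slot(12345)))

	// BuildURL leaves the path untouched when no parameters are given,
	// otherwise it appends the encoded query string.
	fmt.Println(apiutil.BuildURL("/eth/v1/beacon/headers", params)) // /eth/v1/beacon/headers?slot=12345

	// ValidRoot only accepts 0x-prefixed 32-byte hex roots.
	fmt.Println(apiutil.ValidRoot("0x1234")) // false
}
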
@@ -1,35 +0,0 @@
|
||||
load("@prysm//tools/go:def.bzl", "go_library", "go_test")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"client.go",
|
||||
"errors.go",
|
||||
"json_rest_handler.go",
|
||||
"options.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//api:go_default_library",
|
||||
"//network/httputil:go_default_library",
|
||||
"@com_github_pkg_errors//:go_default_library",
|
||||
],
|
||||
)
|
||||
|
||||
go_test(
|
||||
name = "go_default_test",
|
||||
srcs = [
|
||||
"client_test.go",
|
||||
"json_rest_handler_test.go",
|
||||
],
|
||||
embed = [":go_default_library"],
|
||||
deps = [
|
||||
"//api:go_default_library",
|
||||
"//api/server/structs:go_default_library",
|
||||
"//network/httputil:go_default_library",
|
||||
"//testing/assert:go_default_library",
|
||||
"//testing/require:go_default_library",
|
||||
"@com_github_pkg_errors//:go_default_library",
|
||||
],
|
||||
)
|
||||
@@ -3,34 +3,52 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test")
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"checkpoint.go",
|
||||
"client.go",
|
||||
"doc.go",
|
||||
"log.go",
|
||||
"structs.go",
|
||||
"template.go",
|
||||
"errors.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon",
|
||||
importpath = "github.com/prysmaticlabs/prysm/api/client/beacon",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//api/client:go_default_library",
|
||||
"//api/server:go_default_library",
|
||||
"//api/server/structs:go_default_library",
|
||||
"//consensus-types/primitives:go_default_library",
|
||||
"//beacon-chain/core/helpers:go_default_library",
|
||||
"//beacon-chain/rpc/apimiddleware:go_default_library",
|
||||
"//beacon-chain/state:go_default_library",
|
||||
"//encoding/bytesutil:go_default_library",
|
||||
"//encoding/ssz/detect:go_default_library",
|
||||
"//io/file:go_default_library",
|
||||
"//network/forks:go_default_library",
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"//proto/prysm/v1alpha1/block:go_default_library",
|
||||
"//runtime/version:go_default_library",
|
||||
"//time/slots:go_default_library",
|
||||
"@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
|
||||
"@com_github_pkg_errors//:go_default_library",
|
||||
"@com_github_prysmaticlabs_eth2_types//:go_default_library",
|
||||
"@com_github_sirupsen_logrus//:go_default_library",
|
||||
"@org_golang_x_mod//semver:go_default_library",
|
||||
],
|
||||
)
|
||||
|
||||
go_test(
|
||||
name = "go_default_test",
|
||||
srcs = ["client_test.go"],
|
||||
srcs = [
|
||||
"checkpoint_test.go",
|
||||
"client_test.go",
|
||||
],
|
||||
embed = [":go_default_library"],
|
||||
deps = [
|
||||
"//api/client:go_default_library",
|
||||
"//beacon-chain/state:go_default_library",
|
||||
"//config/params:go_default_library",
|
||||
"//encoding/ssz/detect:go_default_library",
|
||||
"//network/forks:go_default_library",
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"//proto/prysm/v1alpha1/wrapper:go_default_library",
|
||||
"//runtime/version:go_default_library",
|
||||
"//testing/require:go_default_library",
|
||||
"//testing/util:go_default_library",
|
||||
"//time/slots:go_default_library",
|
||||
"@com_github_pkg_errors//:go_default_library",
|
||||
"@com_github_prysmaticlabs_eth2_types//:go_default_library",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -1,45 +0,0 @@
|
||||
load("@prysm//tools/go:def.bzl", "go_library", "go_test")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"client.go",
|
||||
"grpc_client.go",
|
||||
"interfaces.go",
|
||||
"rest_client.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon/chain",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//api/client:go_default_library",
|
||||
"//api/client/beacon/shared_providers:go_default_library",
|
||||
"//api/server/structs:go_default_library",
|
||||
"//config/features:go_default_library",
|
||||
"//consensus-types/primitives:go_default_library",
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"//time/slots:go_default_library",
|
||||
"//validator/helpers:go_default_library",
|
||||
"@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
|
||||
"@com_github_golang_protobuf//ptypes/empty",
|
||||
"@com_github_pkg_errors//:go_default_library",
|
||||
"@org_golang_google_grpc//:go_default_library",
|
||||
],
|
||||
)
|
||||
|
||||
go_test(
|
||||
name = "go_default_test",
|
||||
srcs = ["rest_client_test.go"],
|
||||
embed = [":go_default_library"],
|
||||
deps = [
|
||||
"//api/client/beacon/mock:go_default_library",
|
||||
"//api/server/structs:go_default_library",
|
||||
"//consensus-types/primitives:go_default_library",
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"//testing/assert:go_default_library",
|
||||
"//testing/require:go_default_library",
|
||||
"//time/slots:go_default_library",
|
||||
"@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
|
||||
"@org_golang_google_protobuf//types/known/emptypb:go_default_library",
|
||||
"@org_uber_go_mock//gomock:go_default_library",
|
||||
],
|
||||
)
|
||||
@@ -1,31 +0,0 @@
package chain

import (
	"github.com/prysmaticlabs/prysm/v5/api/client"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/shared_providers"
	"github.com/prysmaticlabs/prysm/v5/config/features"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
	validatorHelpers "github.com/prysmaticlabs/prysm/v5/validator/helpers"
	"google.golang.org/grpc"
)

func NewClient(validatorConn validatorHelpers.NodeConnection, jsonRestHandler client.JsonRestHandler) Client {
	grpcClient := NewGrpcChainClient(validatorConn.GetGrpcClientConn())
	if features.Get().EnableBeaconRESTApi {
		return NewBeaconApiChainClientWithFallback(jsonRestHandler, grpcClient)
	} else {
		return grpcClient
	}
}

func NewGrpcChainClient(cc grpc.ClientConnInterface) Client {
	return &grpcChainClient{ethpb.NewBeaconChainClient(cc)}
}

func NewBeaconApiChainClientWithFallback(jsonRestHandler client.JsonRestHandler, fallbackClient Client) Client {
	return &beaconApiChainClient{
		jsonRestHandler:         jsonRestHandler,
		fallbackClient:          fallbackClient,
		stateValidatorsProvider: shared_providers.NewStateValidators(jsonRestHandler),
	}
}
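As a usage sketch only (the connection plumbing below is assumed, not shown in the diff), NewClient above is the single entry point: it returns the REST-backed chain client with gRPC fallback when the EnableBeaconRESTApi feature flag is on, and the plain gRPC client otherwise.

package chainexample

import (
	"context"
	"fmt"

	"github.com/golang/protobuf/ptypes/empty"
	"github.com/prysmaticlabs/prysm/v5/api/client"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/chain"
	validatorHelpers "github.com/prysmaticlabs/prysm/v5/validator/helpers"
)

// printChainHead assumes conn and restHandler were built by the validator's
// startup code; it only demonstrates how the constructed Client is consumed.
func printChainHead(ctx context.Context, conn validatorHelpers.NodeConnection, restHandler client.JsonRestHandler) error {
	chainClient := chain.NewClient(conn, restHandler)

	head, err := chainClient.ChainHead(ctx, &empty.Empty{})
	if err != nil {
		return err
	}
	fmt.Printf("head slot %d, finalized epoch %d\n", head.HeadSlot, head.FinalizedEpoch)
	return nil
}
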
@@ -1,36 +0,0 @@
package chain

import (
	"context"

	"github.com/golang/protobuf/ptypes/empty"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

type grpcChainClient struct {
	beaconChainClient ethpb.BeaconChainClient
}

func (c *grpcChainClient) ChainHead(ctx context.Context, in *empty.Empty) (*ethpb.ChainHead, error) {
	return c.beaconChainClient.GetChainHead(ctx, in)
}

func (c *grpcChainClient) ValidatorBalances(ctx context.Context, in *ethpb.ListValidatorBalancesRequest) (*ethpb.ValidatorBalances, error) {
	return c.beaconChainClient.ListValidatorBalances(ctx, in)
}

func (c *grpcChainClient) Validators(ctx context.Context, in *ethpb.ListValidatorsRequest) (*ethpb.Validators, error) {
	return c.beaconChainClient.ListValidators(ctx, in)
}

func (c *grpcChainClient) ValidatorQueue(ctx context.Context, in *empty.Empty) (*ethpb.ValidatorQueue, error) {
	return c.beaconChainClient.GetValidatorQueue(ctx, in)
}

func (c *grpcChainClient) ValidatorPerformance(ctx context.Context, in *ethpb.ValidatorPerformanceRequest) (*ethpb.ValidatorPerformanceResponse, error) {
	return c.beaconChainClient.GetValidatorPerformance(ctx, in)
}

func (c *grpcChainClient) ValidatorParticipation(ctx context.Context, in *ethpb.GetValidatorParticipationRequest) (*ethpb.ValidatorParticipationResponse, error) {
	return c.beaconChainClient.GetValidatorParticipation(ctx, in)
}
@@ -1,17 +0,0 @@
package chain

import (
	"context"

	"github.com/golang/protobuf/ptypes/empty"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

type Client interface {
	ChainHead(ctx context.Context, in *empty.Empty) (*ethpb.ChainHead, error)
	ValidatorBalances(ctx context.Context, in *ethpb.ListValidatorBalancesRequest) (*ethpb.ValidatorBalances, error)
	Validators(ctx context.Context, in *ethpb.ListValidatorsRequest) (*ethpb.Validators, error)
	ValidatorQueue(ctx context.Context, in *empty.Empty) (*ethpb.ValidatorQueue, error)
	ValidatorParticipation(ctx context.Context, in *ethpb.GetValidatorParticipationRequest) (*ethpb.ValidatorParticipationResponse, error)
	ValidatorPerformance(context.Context, *ethpb.ValidatorPerformanceRequest) (*ethpb.ValidatorPerformanceResponse, error)
}
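Purely illustrative and not part of the removed code: a hand-written stub satisfying the Client interface above can stand in for the gomock-based mocks in simple tests; every method here just returns canned values.

package chainstub

import (
	"context"

	"github.com/golang/protobuf/ptypes/empty"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

// stubChainClient is a hypothetical helper; only ChainHead returns
// configurable data, the remaining methods return empty responses.
type stubChainClient struct {
	head *ethpb.ChainHead
}

func (s *stubChainClient) ChainHead(context.Context, *empty.Empty) (*ethpb.ChainHead, error) {
	return s.head, nil
}

func (s *stubChainClient) ValidatorBalances(context.Context, *ethpb.ListValidatorBalancesRequest) (*ethpb.ValidatorBalances, error) {
	return &ethpb.ValidatorBalances{}, nil
}

func (s *stubChainClient) Validators(context.Context, *ethpb.ListValidatorsRequest) (*ethpb.Validators, error) {
	return &ethpb.Validators{}, nil
}

func (s *stubChainClient) ValidatorQueue(context.Context, *empty.Empty) (*ethpb.ValidatorQueue, error) {
	return &ethpb.ValidatorQueue{}, nil
}

func (s *stubChainClient) ValidatorParticipation(context.Context, *ethpb.GetValidatorParticipationRequest) (*ethpb.ValidatorParticipationResponse, error) {
	return &ethpb.ValidatorParticipationResponse{}, nil
}

func (s *stubChainClient) ValidatorPerformance(context.Context, *ethpb.ValidatorPerformanceRequest) (*ethpb.ValidatorPerformanceResponse, error) {
	return &ethpb.ValidatorPerformanceResponse{}, nil
}
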
@@ -1,335 +0,0 @@
|
||||
package chain
|
||||
|
||||
import (
|
||||
"context"
|
||||
"reflect"
|
||||
"strconv"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/golang/protobuf/ptypes/empty"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/shared_providers"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/time/slots"
|
||||
)
|
||||
|
||||
type beaconApiChainClient struct {
|
||||
fallbackClient Client
|
||||
jsonRestHandler client.JsonRestHandler
|
||||
stateValidatorsProvider shared_providers.StateValidators
|
||||
}
|
||||
|
||||
func (c beaconApiChainClient) headBlockHeaders(ctx context.Context) (*structs.GetBlockHeaderResponse, error) {
|
||||
blockHeader := structs.GetBlockHeaderResponse{}
|
||||
err := c.jsonRestHandler.Get(ctx, "/eth/v1/beacon/headers/head", &blockHeader)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if blockHeader.Data == nil || blockHeader.Data.Header == nil {
|
||||
return nil, errors.New("block header data is nil")
|
||||
}
|
||||
|
||||
if blockHeader.Data.Header.Message == nil {
|
||||
return nil, errors.New("block header message is nil")
|
||||
}
|
||||
|
||||
return &blockHeader, nil
|
||||
}
|
||||
|
||||
func (c beaconApiChainClient) ChainHead(ctx context.Context, _ *empty.Empty) (*ethpb.ChainHead, error) {
|
||||
const endpoint = "/eth/v1/beacon/states/head/finality_checkpoints"
|
||||
|
||||
finalityCheckpoints := structs.GetFinalityCheckpointsResponse{}
|
||||
if err := c.jsonRestHandler.Get(ctx, endpoint, &finalityCheckpoints); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if finalityCheckpoints.Data == nil {
|
||||
return nil, errors.New("finality checkpoints data is nil")
|
||||
}
|
||||
|
||||
if finalityCheckpoints.Data.Finalized == nil {
|
||||
return nil, errors.New("finalized checkpoint is nil")
|
||||
}
|
||||
|
||||
finalizedEpoch, err := strconv.ParseUint(finalityCheckpoints.Data.Finalized.Epoch, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse finalized epoch `%s`", finalityCheckpoints.Data.Finalized.Epoch)
|
||||
}
|
||||
|
||||
finalizedSlot, err := slots.EpochStart(primitives.Epoch(finalizedEpoch))
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to get first slot for epoch `%d`", finalizedEpoch)
|
||||
}
|
||||
|
||||
finalizedRoot, err := hexutil.Decode(finalityCheckpoints.Data.Finalized.Root)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode finalized checkpoint root `%s`", finalityCheckpoints.Data.Finalized.Root)
|
||||
}
|
||||
|
||||
if finalityCheckpoints.Data.CurrentJustified == nil {
|
||||
return nil, errors.New("current justified checkpoint is nil")
|
||||
}
|
||||
|
||||
justifiedEpoch, err := strconv.ParseUint(finalityCheckpoints.Data.CurrentJustified.Epoch, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse current justified checkpoint epoch `%s`", finalityCheckpoints.Data.CurrentJustified.Epoch)
|
||||
}
|
||||
|
||||
justifiedSlot, err := slots.EpochStart(primitives.Epoch(justifiedEpoch))
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to get first slot for epoch `%d`", justifiedEpoch)
|
||||
}
|
||||
|
||||
justifiedRoot, err := hexutil.Decode(finalityCheckpoints.Data.CurrentJustified.Root)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode current justified checkpoint root `%s`", finalityCheckpoints.Data.CurrentJustified.Root)
|
||||
}
|
||||
|
||||
if finalityCheckpoints.Data.PreviousJustified == nil {
|
||||
return nil, errors.New("previous justified checkpoint is nil")
|
||||
}
|
||||
|
||||
previousJustifiedEpoch, err := strconv.ParseUint(finalityCheckpoints.Data.PreviousJustified.Epoch, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse previous justified checkpoint epoch `%s`", finalityCheckpoints.Data.PreviousJustified.Epoch)
|
||||
}
|
||||
|
||||
previousJustifiedSlot, err := slots.EpochStart(primitives.Epoch(previousJustifiedEpoch))
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to get first slot for epoch `%d`", previousJustifiedEpoch)
|
||||
}
|
||||
|
||||
previousJustifiedRoot, err := hexutil.Decode(finalityCheckpoints.Data.PreviousJustified.Root)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode previous justified checkpoint root `%s`", finalityCheckpoints.Data.PreviousJustified.Root)
|
||||
}
|
||||
|
||||
blockHeader, err := c.headBlockHeaders(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get head block headers")
|
||||
}
|
||||
|
||||
headSlot, err := strconv.ParseUint(blockHeader.Data.Header.Message.Slot, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse head block slot `%s`", blockHeader.Data.Header.Message.Slot)
|
||||
}
|
||||
|
||||
headEpoch := slots.ToEpoch(primitives.Slot(headSlot))
|
||||
|
||||
headBlockRoot, err := hexutil.Decode(blockHeader.Data.Root)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode head block root `%s`", blockHeader.Data.Root)
|
||||
}
|
||||
|
||||
return &ethpb.ChainHead{
|
||||
HeadSlot: primitives.Slot(headSlot),
|
||||
HeadEpoch: headEpoch,
|
||||
HeadBlockRoot: headBlockRoot,
|
||||
FinalizedSlot: finalizedSlot,
|
||||
FinalizedEpoch: primitives.Epoch(finalizedEpoch),
|
||||
FinalizedBlockRoot: finalizedRoot,
|
||||
JustifiedSlot: justifiedSlot,
|
||||
JustifiedEpoch: primitives.Epoch(justifiedEpoch),
|
||||
JustifiedBlockRoot: justifiedRoot,
|
||||
PreviousJustifiedSlot: previousJustifiedSlot,
|
||||
PreviousJustifiedEpoch: primitives.Epoch(previousJustifiedEpoch),
|
||||
PreviousJustifiedBlockRoot: previousJustifiedRoot,
|
||||
OptimisticStatus: blockHeader.ExecutionOptimistic,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (c beaconApiChainClient) ValidatorBalances(ctx context.Context, in *ethpb.ListValidatorBalancesRequest) (*ethpb.ValidatorBalances, error) {
|
||||
if c.fallbackClient != nil {
|
||||
return c.fallbackClient.ValidatorBalances(ctx, in)
|
||||
}
|
||||
|
||||
// TODO: Implement me
|
||||
return nil, errors.New("beaconApiChainClient.ValidatorBalances is not implemented. To use a fallback client, pass a fallback client as the last argument of NewBeaconApiChainClientWithFallback.")
|
||||
}
|
||||
|
||||
func (c beaconApiChainClient) Validators(ctx context.Context, in *ethpb.ListValidatorsRequest) (*ethpb.Validators, error) {
|
||||
pageSize := in.PageSize
|
||||
|
||||
// We follow the gRPC behavior here, which returns a maximum of 250 results when pageSize == 0
|
||||
if pageSize == 0 {
|
||||
pageSize = 250
|
||||
}
|
||||
|
||||
var pageToken uint64
|
||||
var err error
|
||||
|
||||
if in.PageToken != "" {
|
||||
if pageToken, err = strconv.ParseUint(in.PageToken, 10, 64); err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse page token `%s`", in.PageToken)
|
||||
}
|
||||
}
|
||||
|
||||
var statuses []string
|
||||
if in.Active {
|
||||
statuses = []string{"active"}
|
||||
}
|
||||
|
||||
pubkeys := make([]string, len(in.PublicKeys))
|
||||
for idx, pubkey := range in.PublicKeys {
|
||||
pubkeys[idx] = hexutil.Encode(pubkey)
|
||||
}
|
||||
|
||||
var stateValidators *structs.GetValidatorsResponse
|
||||
var epoch primitives.Epoch
|
||||
|
||||
switch queryFilter := in.QueryFilter.(type) {
|
||||
case *ethpb.ListValidatorsRequest_Epoch:
|
||||
slot, err := slots.EpochStart(queryFilter.Epoch)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to get first slot for epoch `%d`", queryFilter.Epoch)
|
||||
}
|
||||
if stateValidators, err = c.stateValidatorsProvider.StateValidatorsForSlot(ctx, slot, pubkeys, in.Indices, statuses); err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to get state validators for slot `%d`", slot)
|
||||
}
|
||||
epoch = slots.ToEpoch(slot)
|
||||
case *ethpb.ListValidatorsRequest_Genesis:
|
||||
if stateValidators, err = c.stateValidatorsProvider.StateValidatorsForSlot(ctx, 0, pubkeys, in.Indices, statuses); err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to get genesis state validators")
|
||||
}
|
||||
epoch = 0
|
||||
case nil:
|
||||
if stateValidators, err = c.stateValidatorsProvider.StateValidatorsForHead(ctx, pubkeys, in.Indices, statuses); err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get head state validators")
|
||||
}
|
||||
|
||||
blockHeader, err := c.headBlockHeaders(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get head block headers")
|
||||
}
|
||||
|
||||
slot, err := strconv.ParseUint(blockHeader.Data.Header.Message.Slot, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse header slot `%s`", blockHeader.Data.Header.Message.Slot)
|
||||
}
|
||||
|
||||
epoch = slots.ToEpoch(primitives.Slot(slot))
|
||||
default:
|
||||
return nil, errors.Errorf("unsupported query filter type `%v`", reflect.TypeOf(queryFilter))
|
||||
}
|
||||
|
||||
if stateValidators.Data == nil {
|
||||
return nil, errors.New("state validators data is nil")
|
||||
}
|
||||
|
||||
start := pageToken * uint64(pageSize)
|
||||
if start > uint64(len(stateValidators.Data)) {
|
||||
start = uint64(len(stateValidators.Data))
|
||||
}
|
||||
|
||||
end := start + uint64(pageSize)
|
||||
if end > uint64(len(stateValidators.Data)) {
|
||||
end = uint64(len(stateValidators.Data))
|
||||
}
|
||||
|
||||
validators := make([]*ethpb.Validators_ValidatorContainer, end-start)
|
||||
for idx := start; idx < end; idx++ {
|
||||
stateValidator := stateValidators.Data[idx]
|
||||
|
||||
if stateValidator.Validator == nil {
|
||||
return nil, errors.Errorf("state validator at index `%d` is nil", idx)
|
||||
}
|
||||
|
||||
pubkey, err := hexutil.Decode(stateValidator.Validator.Pubkey)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode validator pubkey `%s`", stateValidator.Validator.Pubkey)
|
||||
}
|
||||
|
||||
withdrawalCredentials, err := hexutil.Decode(stateValidator.Validator.WithdrawalCredentials)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode validator withdrawal credentials `%s`", stateValidator.Validator.WithdrawalCredentials)
|
||||
}
|
||||
|
||||
effectiveBalance, err := strconv.ParseUint(stateValidator.Validator.EffectiveBalance, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse validator effective balance `%s`", stateValidator.Validator.EffectiveBalance)
|
||||
}
|
||||
|
||||
validatorIndex, err := strconv.ParseUint(stateValidator.Index, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse validator index `%s`", stateValidator.Index)
|
||||
}
|
||||
|
||||
activationEligibilityEpoch, err := strconv.ParseUint(stateValidator.Validator.ActivationEligibilityEpoch, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse validator activation eligibility epoch `%s`", stateValidator.Validator.ActivationEligibilityEpoch)
|
||||
}
|
||||
|
||||
activationEpoch, err := strconv.ParseUint(stateValidator.Validator.ActivationEpoch, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse validator activation epoch `%s`", stateValidator.Validator.ActivationEpoch)
|
||||
}
|
||||
|
||||
exitEpoch, err := strconv.ParseUint(stateValidator.Validator.ExitEpoch, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse validator exit epoch `%s`", stateValidator.Validator.ExitEpoch)
|
||||
}
|
||||
|
||||
withdrawableEpoch, err := strconv.ParseUint(stateValidator.Validator.WithdrawableEpoch, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse validator withdrawable epoch `%s`", stateValidator.Validator.WithdrawableEpoch)
|
||||
}
|
||||
|
||||
validators[idx-start] = &ethpb.Validators_ValidatorContainer{
Index: primitives.ValidatorIndex(validatorIndex),
Validator: &ethpb.Validator{
|
||||
PublicKey: pubkey,
|
||||
WithdrawalCredentials: withdrawalCredentials,
|
||||
EffectiveBalance: effectiveBalance,
|
||||
Slashed: stateValidator.Validator.Slashed,
|
||||
ActivationEligibilityEpoch: primitives.Epoch(activationEligibilityEpoch),
|
||||
ActivationEpoch: primitives.Epoch(activationEpoch),
|
||||
ExitEpoch: primitives.Epoch(exitEpoch),
|
||||
WithdrawableEpoch: primitives.Epoch(withdrawableEpoch),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
var nextPageToken string
|
||||
if end < uint64(len(stateValidators.Data)) {
|
||||
nextPageToken = strconv.FormatUint(pageToken+1, 10)
|
||||
}
|
||||
|
||||
return &ethpb.Validators{
|
||||
TotalSize: int32(len(stateValidators.Data)),
|
||||
Epoch: epoch,
|
||||
ValidatorList: validators,
|
||||
NextPageToken: nextPageToken,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (c beaconApiChainClient) ValidatorQueue(ctx context.Context, in *empty.Empty) (*ethpb.ValidatorQueue, error) {
|
||||
if c.fallbackClient != nil {
|
||||
return c.fallbackClient.ValidatorQueue(ctx, in)
|
||||
}
|
||||
|
||||
// TODO: Implement me
|
||||
return nil, errors.New("beaconApiChainClient.ValidatorQueue is not implemented. To use a fallback client, pass a fallback client as the last argument of NewBeaconApiChainClientWithFallback.")
|
||||
}
|
||||
|
||||
func (c beaconApiChainClient) ValidatorPerformance(ctx context.Context, in *ethpb.ValidatorPerformanceRequest) (*ethpb.ValidatorPerformanceResponse, error) {
|
||||
if c.fallbackClient != nil {
|
||||
return c.fallbackClient.ValidatorPerformance(ctx, in)
|
||||
}
|
||||
|
||||
// TODO: Implement me
|
||||
return nil, errors.New("beaconApiChainClient.ValidatorPerformance is not implemented. To use a fallback client, pass a fallback client as the last argument of NewBeaconApiChainClientWithFallback.")
|
||||
}
|
||||
|
||||
func (c beaconApiChainClient) ValidatorParticipation(ctx context.Context, in *ethpb.GetValidatorParticipationRequest) (*ethpb.ValidatorParticipationResponse, error) {
|
||||
if c.fallbackClient != nil {
|
||||
return c.fallbackClient.ValidatorParticipation(ctx, in)
|
||||
}
|
||||
|
||||
// TODO: Implement me
|
||||
return nil, errors.New("beaconApiChainClient.ValidatorParticipation is not implemented. To use a fallback client, pass a fallback client as the last argument of NewBeaconApiChainClientWithFallback.")
|
||||
}
|
||||
@@ -1,918 +0,0 @@
|
||||
package chain
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/time/slots"
|
||||
"go.uber.org/mock/gomock"
|
||||
"google.golang.org/protobuf/types/known/emptypb"
|
||||
)
|
||||
|
||||
func TestListValidators(t *testing.T) {
|
||||
const blockHeaderEndpoint = "/eth/v1/beacon/headers/head"
|
||||
|
||||
t.Run("invalid token", func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
beaconChainClient := beaconApiChainClient{}
|
||||
_, err := beaconChainClient.Validators(ctx, &ethpb.ListValidatorsRequest{
|
||||
PageToken: "foo",
|
||||
})
|
||||
assert.ErrorContains(t, "failed to parse page token `foo`", err)
|
||||
})
|
||||
|
||||
t.Run("query filter epoch overflow", func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
beaconChainClient := beaconApiChainClient{}
|
||||
_, err := beaconChainClient.Validators(ctx, &ethpb.ListValidatorsRequest{
QueryFilter: &ethpb.ListValidatorsRequest_Epoch{
|
||||
Epoch: math.MaxUint64,
|
||||
},
|
||||
})
|
||||
assert.ErrorContains(t, fmt.Sprintf("failed to get first slot for epoch `%d`", uint64(math.MaxUint64)), err)
|
||||
})
|
||||
|
||||
t.Run("fails to get validators for epoch filter", func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
stateValidatorsProvider := mock.NewMockStateValidatorsProvider(ctrl)
|
||||
stateValidatorsProvider.EXPECT().StateValidatorsForSlot(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(
|
||||
nil,
|
||||
errors.New("foo error"),
|
||||
)
|
||||
|
||||
beaconChainClient := beaconApiChainClient{stateValidatorsProvider: stateValidatorsProvider}
|
||||
_, err := beaconChainClient.Validators(ctx, &ethpb.ListValidatorsRequest{
QueryFilter: &ethpb.ListValidatorsRequest_Epoch{
|
||||
Epoch: 0,
|
||||
},
|
||||
})
|
||||
assert.ErrorContains(t, "failed to get state validators for slot `0`: foo error", err)
|
||||
})
|
||||
|
||||
t.Run("fails to get validators for genesis filter", func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
stateValidatorsProvider := mock.NewMockStateValidatorsProvider(ctrl)
|
||||
stateValidatorsProvider.EXPECT().StateValidatorsForSlot(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(
|
||||
nil,
|
||||
errors.New("bar error"),
|
||||
)
|
||||
|
||||
beaconChainClient := beaconApiChainClient{stateValidatorsProvider: stateValidatorsProvider}
|
||||
_, err := beaconChainClient.Validators(ctx, &ethpb.ListValidatorsRequest{
QueryFilter: &ethpb.ListValidatorsRequest_Genesis{},
|
||||
})
|
||||
assert.ErrorContains(t, "failed to get genesis state validators: bar error", err)
|
||||
})
|
||||
|
||||
t.Run("fails to get validators for nil filter", func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
stateValidatorsProvider := mock.NewMockStateValidatorsProvider(ctrl)
|
||||
stateValidatorsProvider.EXPECT().StateValidatorsForHead(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(
|
||||
nil,
|
||||
errors.New("foo error"),
|
||||
)
|
||||
|
||||
beaconChainClient := beaconApiChainClient{stateValidatorsProvider: stateValidatorsProvider}
|
||||
_, err := beaconChainClient.Validators(ctx, &ethpb.ListValidatorsRequest{
|
||||
QueryFilter: nil,
|
||||
})
|
||||
assert.ErrorContains(t, "failed to get head state validators: foo error", err)
|
||||
})
|
||||
|
||||
t.Run("fails to get latest block header for nil filter", func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
stateValidatorsProvider := mock.NewMockStateValidatorsProvider(ctrl)
|
||||
stateValidatorsProvider.EXPECT().StateValidatorsForHead(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(
|
||||
nil,
|
||||
nil,
|
||||
)
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(gomock.Any(), blockHeaderEndpoint, gomock.Any()).Return(errors.New("bar error"))
|
||||
|
||||
beaconChainClient := beaconApiChainClient{
|
||||
stateValidatorsProvider: stateValidatorsProvider,
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
_, err := beaconChainClient.Validators(ctx, &ethpb.ListValidatorsRequest{
|
||||
QueryFilter: nil,
|
||||
})
|
||||
assert.ErrorContains(t, "bar error", err)
|
||||
})
|
||||
|
||||
t.Run("fails to read block header response", func(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
expectedError string
|
||||
blockHeaderResponse structs.GetBlockHeaderResponse
|
||||
}{
|
||||
{
|
||||
name: "nil data",
|
||||
blockHeaderResponse: structs.GetBlockHeaderResponse{
|
||||
Data: nil,
|
||||
},
|
||||
expectedError: "block header data is nil",
|
||||
},
|
||||
{
|
||||
name: "nil data header",
|
||||
blockHeaderResponse: structs.GetBlockHeaderResponse{
|
||||
Data: &structs.SignedBeaconBlockHeaderContainer{
|
||||
Header: nil,
|
||||
},
|
||||
},
|
||||
expectedError: "block header data is nil",
|
||||
},
|
||||
{
|
||||
name: "nil message",
|
||||
blockHeaderResponse: structs.GetBlockHeaderResponse{
|
||||
Data: &structs.SignedBeaconBlockHeaderContainer{
|
||||
Header: &structs.SignedBeaconBlockHeader{
|
||||
Message: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: "block header message is nil",
|
||||
},
|
||||
{
|
||||
name: "invalid header slot",
|
||||
blockHeaderResponse: structs.GetBlockHeaderResponse{
|
||||
Data: &structs.SignedBeaconBlockHeaderContainer{
|
||||
Header: &structs.SignedBeaconBlockHeader{
|
||||
Message: &structs.BeaconBlockHeader{
|
||||
Slot: "foo",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: "failed to parse header slot `foo`",
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
stateValidatorsProvider := mock.NewMockStateValidatorsProvider(ctrl)
|
||||
stateValidatorsProvider.EXPECT().StateValidatorsForHead(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(
|
||||
nil,
|
||||
nil,
|
||||
)
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(gomock.Any(), blockHeaderEndpoint, gomock.Any()).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
testCase.blockHeaderResponse,
|
||||
)
|
||||
|
||||
beaconChainClient := beaconApiChainClient{
|
||||
stateValidatorsProvider: stateValidatorsProvider,
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
_, err := beaconChainClient.Validators(ctx, &ethpb.ListValidatorsRequest{
|
||||
QueryFilter: nil,
|
||||
})
|
||||
assert.ErrorContains(t, testCase.expectedError, err)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("fails to get validators for genesis filter", func(t *testing.T) {
|
||||
generateValidStateValidatorsResponse := func() *structs.GetValidatorsResponse {
|
||||
return &structs.GetValidatorsResponse{
|
||||
Data: []*structs.ValidatorContainer{
|
||||
{
|
||||
Index: "1",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: hexutil.Encode([]byte{3}),
|
||||
WithdrawalCredentials: hexutil.Encode([]byte{4}),
|
||||
EffectiveBalance: "5",
|
||||
Slashed: true,
|
||||
ActivationEligibilityEpoch: "6",
|
||||
ActivationEpoch: "7",
|
||||
ExitEpoch: "8",
|
||||
WithdrawableEpoch: "9",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
generateStateValidatorsResponse func() *structs.GetValidatorsResponse
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "nil validator",
|
||||
generateStateValidatorsResponse: func() *structs.GetValidatorsResponse {
|
||||
validatorsResponse := generateValidStateValidatorsResponse()
|
||||
validatorsResponse.Data[0].Validator = nil
|
||||
return validatorsResponse
|
||||
},
|
||||
expectedError: "state validator at index `0` is nil",
|
||||
},
|
||||
{
|
||||
name: "invalid pubkey",
|
||||
generateStateValidatorsResponse: func() *structs.GetValidatorsResponse {
|
||||
validatorsResponse := generateValidStateValidatorsResponse()
|
||||
validatorsResponse.Data[0].Validator.Pubkey = "foo"
|
||||
return validatorsResponse
|
||||
},
|
||||
expectedError: "failed to decode validator pubkey `foo`",
|
||||
},
|
||||
{
|
||||
name: "invalid withdrawal credentials",
|
||||
generateStateValidatorsResponse: func() *structs.GetValidatorsResponse {
|
||||
validatorsResponse := generateValidStateValidatorsResponse()
|
||||
validatorsResponse.Data[0].Validator.WithdrawalCredentials = "bar"
|
||||
return validatorsResponse
|
||||
},
|
||||
expectedError: "failed to decode validator withdrawal credentials `bar`",
|
||||
},
|
||||
{
|
||||
name: "invalid effective balance",
|
||||
generateStateValidatorsResponse: func() *structs.GetValidatorsResponse {
|
||||
validatorsResponse := generateValidStateValidatorsResponse()
|
||||
validatorsResponse.Data[0].Validator.EffectiveBalance = "foo"
|
||||
return validatorsResponse
|
||||
},
|
||||
expectedError: "failed to parse validator effective balance `foo`",
|
||||
},
|
||||
{
|
||||
name: "invalid validator index",
|
||||
generateStateValidatorsResponse: func() *structs.GetValidatorsResponse {
|
||||
validatorsResponse := generateValidStateValidatorsResponse()
|
||||
validatorsResponse.Data[0].Index = "bar"
|
||||
return validatorsResponse
|
||||
},
|
||||
expectedError: "failed to parse validator index `bar`",
|
||||
},
|
||||
{
|
||||
name: "invalid activation eligibility epoch",
|
||||
generateStateValidatorsResponse: func() *structs.GetValidatorsResponse {
|
||||
validatorsResponse := generateValidStateValidatorsResponse()
|
||||
validatorsResponse.Data[0].Validator.ActivationEligibilityEpoch = "foo"
|
||||
return validatorsResponse
|
||||
},
|
||||
expectedError: "failed to parse validator activation eligibility epoch `foo`",
|
||||
},
|
||||
{
|
||||
name: "invalid activation epoch",
|
||||
generateStateValidatorsResponse: func() *structs.GetValidatorsResponse {
|
||||
validatorsResponse := generateValidStateValidatorsResponse()
|
||||
validatorsResponse.Data[0].Validator.ActivationEpoch = "bar"
|
||||
return validatorsResponse
|
||||
},
|
||||
expectedError: "failed to parse validator activation epoch `bar`",
|
||||
},
|
||||
{
|
||||
name: "invalid exit epoch",
|
||||
generateStateValidatorsResponse: func() *structs.GetValidatorsResponse {
|
||||
validatorsResponse := generateValidStateValidatorsResponse()
|
||||
validatorsResponse.Data[0].Validator.ExitEpoch = "foo"
|
||||
return validatorsResponse
|
||||
},
|
||||
expectedError: "failed to parse validator exit epoch `foo`",
|
||||
},
|
||||
{
|
||||
name: "invalid withdrawable epoch",
|
||||
generateStateValidatorsResponse: func() *structs.GetValidatorsResponse {
|
||||
validatorsResponse := generateValidStateValidatorsResponse()
|
||||
validatorsResponse.Data[0].Validator.WithdrawableEpoch = "bar"
|
||||
return validatorsResponse
|
||||
},
|
||||
expectedError: "failed to parse validator withdrawable epoch `bar`",
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
stateValidatorsProvider := mock.NewMockStateValidatorsProvider(ctrl)
|
||||
stateValidatorsProvider.EXPECT().StateValidatorsForSlot(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(
|
||||
testCase.generateStateValidatorsResponse(),
|
||||
nil,
|
||||
)
|
||||
|
||||
beaconChainClient := beaconApiChainClient{stateValidatorsProvider: stateValidatorsProvider}
|
||||
_, err := beaconChainClient.Validators(ctx, &ethpb.ListValidatorsRequest{
QueryFilter: &ethpb.ListValidatorsRequest_Genesis{},
|
||||
})
|
||||
assert.ErrorContains(t, testCase.expectedError, err)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("correctly returns the expected validators", func(t *testing.T) {
|
||||
generateValidStateValidatorsResponse := func() *structs.GetValidatorsResponse {
|
||||
return &structs.GetValidatorsResponse{
|
||||
Data: []*structs.ValidatorContainer{
|
||||
{
|
||||
Index: "1",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: hexutil.Encode([]byte{2}),
|
||||
WithdrawalCredentials: hexutil.Encode([]byte{3}),
|
||||
EffectiveBalance: "4",
|
||||
Slashed: true,
|
||||
ActivationEligibilityEpoch: "5",
|
||||
ActivationEpoch: "6",
|
||||
ExitEpoch: "7",
|
||||
WithdrawableEpoch: "8",
|
||||
},
|
||||
},
|
||||
{
|
||||
Index: "9",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: hexutil.Encode([]byte{10}),
|
||||
WithdrawalCredentials: hexutil.Encode([]byte{11}),
|
||||
EffectiveBalance: "12",
|
||||
Slashed: false,
|
||||
ActivationEligibilityEpoch: "13",
|
||||
ActivationEpoch: "14",
|
||||
ExitEpoch: "15",
|
||||
WithdrawableEpoch: "16",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
generateJsonStateValidatorsResponse func() *structs.GetValidatorsResponse
|
||||
generateProtoValidatorsResponse func() *ethpb.Validators
|
||||
pubkeys [][]byte
|
||||
pubkeyStrings []string
|
||||
indices []primitives.ValidatorIndex
|
||||
statuses []string
|
||||
pageSize int32
|
||||
pageToken string
|
||||
}{
|
||||
{
|
||||
name: "page size 0",
|
||||
generateJsonStateValidatorsResponse: func() *structs.GetValidatorsResponse {
|
||||
validValidatorsResponse := generateValidStateValidatorsResponse()
|
||||
|
||||
// Generate more than 250 validators, but expect only 250 to be returned
|
||||
validators := make([]*structs.ValidatorContainer, 267)
|
||||
for idx := 0; idx < len(validators); idx++ {
|
||||
validators[idx] = validValidatorsResponse.Data[0]
|
||||
}
|
||||
|
||||
validatorsResponse := &structs.GetValidatorsResponse{
|
||||
Data: validators,
|
||||
}
|
||||
|
||||
return validatorsResponse
|
||||
},
|
||||
generateProtoValidatorsResponse: func() *ethpb.Validators {
|
||||
validators := make([]*ethpb.Validators_ValidatorContainer, 250)
|
||||
for idx := 0; idx < len(validators); idx++ {
|
||||
validators[idx] = &ethpb.Validators_ValidatorContainer{
Index: 1,
Validator: &ethpb.Validator{
|
||||
PublicKey: []byte{2},
|
||||
WithdrawalCredentials: []byte{3},
|
||||
EffectiveBalance: 4,
|
||||
Slashed: true,
|
||||
ActivationEligibilityEpoch: 5,
|
||||
ActivationEpoch: 6,
|
||||
ExitEpoch: 7,
|
||||
WithdrawableEpoch: 8,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return &ethpb.Validators{
|
||||
ValidatorList: validators,
|
||||
TotalSize: 267,
|
||||
Epoch: 0,
|
||||
NextPageToken: "1",
|
||||
}
|
||||
},
|
||||
pubkeys: [][]byte{},
|
||||
pubkeyStrings: make([]string, 0),
|
||||
indices: []primitives.ValidatorIndex{},
|
||||
statuses: nil,
|
||||
pageSize: 0,
|
||||
pageToken: "",
|
||||
},
|
||||
{
|
||||
name: "pageSize==1 and pageToken==0",
|
||||
generateJsonStateValidatorsResponse: generateValidStateValidatorsResponse,
|
||||
generateProtoValidatorsResponse: func() *ethpb.Validators {
|
||||
return &ethpb.Validators{
ValidatorList: []*ethpb.Validators_ValidatorContainer{
{
Index: 1,
Validator: &ethpb.Validator{
|
||||
PublicKey: []byte{2},
|
||||
WithdrawalCredentials: []byte{3},
|
||||
EffectiveBalance: 4,
|
||||
Slashed: true,
|
||||
ActivationEligibilityEpoch: 5,
|
||||
ActivationEpoch: 6,
|
||||
ExitEpoch: 7,
|
||||
WithdrawableEpoch: 8,
|
||||
},
|
||||
},
|
||||
},
|
||||
TotalSize: 2,
|
||||
Epoch: 0,
|
||||
NextPageToken: "1",
|
||||
}
|
||||
},
|
||||
pageSize: 1,
|
||||
pageToken: "0",
|
||||
},
|
||||
{
|
||||
name: "pageSize==2 and pageToken==0",
|
||||
generateJsonStateValidatorsResponse: generateValidStateValidatorsResponse,
|
||||
generateProtoValidatorsResponse: func() *ethpb.Validators {
|
||||
return &ethpb.Validators{
ValidatorList: []*ethpb.Validators_ValidatorContainer{
{
Index: 1,
Validator: &ethpb.Validator{
|
||||
PublicKey: []byte{2},
|
||||
WithdrawalCredentials: []byte{3},
|
||||
EffectiveBalance: 4,
|
||||
Slashed: true,
|
||||
ActivationEligibilityEpoch: 5,
|
||||
ActivationEpoch: 6,
|
||||
ExitEpoch: 7,
|
||||
WithdrawableEpoch: 8,
|
||||
},
|
||||
},
|
||||
{
|
||||
Index: 9,
|
||||
Validator: &ethpb.Validator{
|
||||
PublicKey: []byte{10},
|
||||
WithdrawalCredentials: []byte{11},
|
||||
EffectiveBalance: 12,
|
||||
Slashed: false,
|
||||
ActivationEligibilityEpoch: 13,
|
||||
ActivationEpoch: 14,
|
||||
ExitEpoch: 15,
|
||||
WithdrawableEpoch: 16,
|
||||
},
|
||||
},
|
||||
},
|
||||
TotalSize: 2,
|
||||
Epoch: 0,
|
||||
NextPageToken: "",
|
||||
}
|
||||
},
|
||||
pageSize: 2,
|
||||
pageToken: "0",
|
||||
},
|
||||
{
|
||||
name: "pageSize==1 and pageToken==1",
|
||||
generateJsonStateValidatorsResponse: generateValidStateValidatorsResponse,
|
||||
generateProtoValidatorsResponse: func() *ethpb.Validators {
|
||||
return &ethpb.Validators{
ValidatorList: []*ethpb.Validators_ValidatorContainer{
{
Index: 9,
Validator: &ethpb.Validator{
|
||||
PublicKey: []byte{10},
|
||||
WithdrawalCredentials: []byte{11},
|
||||
EffectiveBalance: 12,
|
||||
Slashed: false,
|
||||
ActivationEligibilityEpoch: 13,
|
||||
ActivationEpoch: 14,
|
||||
ExitEpoch: 15,
|
||||
WithdrawableEpoch: 16,
|
||||
},
|
||||
},
|
||||
},
|
||||
TotalSize: 2,
|
||||
Epoch: 0,
|
||||
NextPageToken: "",
|
||||
}
|
||||
},
|
||||
pageSize: 1,
|
||||
pageToken: "1",
|
||||
},
|
||||
{
|
||||
name: "pageSize==1 and pageToken==2",
|
||||
generateJsonStateValidatorsResponse: generateValidStateValidatorsResponse,
|
||||
generateProtoValidatorsResponse: func() *ethpb.Validators {
|
||||
return ðpb.Validators{
|
||||
ValidatorList: []*ethpb.Validators_ValidatorContainer{},
|
||||
TotalSize: 2,
|
||||
Epoch: 0,
|
||||
NextPageToken: "",
|
||||
}
|
||||
},
|
||||
pageSize: 1,
|
||||
pageToken: "2",
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
stateValidatorsProvider := mock.NewMockStateValidatorsProvider(ctrl)
|
||||
stateValidatorsProvider.EXPECT().StateValidatorsForSlot(gomock.Any(), primitives.Slot(0), make([]string, 0), []primitives.ValidatorIndex{}, nil).Return(
|
||||
testCase.generateJsonStateValidatorsResponse(),
|
||||
nil,
|
||||
)
|
||||
|
||||
beaconChainClient := beaconApiChainClient{stateValidatorsProvider: stateValidatorsProvider}
|
||||
validators, err := beaconChainClient.Validators(ctx, ðpb.ListValidatorsRequest{
|
||||
QueryFilter: ðpb.ListValidatorsRequest_Genesis{},
|
||||
PublicKeys: [][]byte{},
|
||||
Indices: []primitives.ValidatorIndex{},
|
||||
Active: false,
|
||||
PageSize: testCase.pageSize,
|
||||
PageToken: testCase.pageToken,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, validators)
|
||||
|
||||
expectedValidators := testCase.generateProtoValidatorsResponse()
|
||||
assert.DeepEqual(t, expectedValidators, validators)
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetChainHead(t *testing.T) {
|
||||
const finalityCheckpointsEndpoint = "/eth/v1/beacon/states/head/finality_checkpoints"
|
||||
const headBlockHeadersEndpoint = "/eth/v1/beacon/headers/head"
|
||||
|
||||
generateValidFinalityCheckpointsResponse := func() structs.GetFinalityCheckpointsResponse {
|
||||
return structs.GetFinalityCheckpointsResponse{
|
||||
Data: &structs.FinalityCheckpoints{
|
||||
PreviousJustified: &structs.Checkpoint{
|
||||
Epoch: "1",
|
||||
Root: hexutil.Encode([]byte{2}),
|
||||
},
|
||||
CurrentJustified: &structs.Checkpoint{
|
||||
Epoch: "3",
|
||||
Root: hexutil.Encode([]byte{4}),
|
||||
},
|
||||
Finalized: &structs.Checkpoint{
|
||||
Epoch: "5",
|
||||
Root: hexutil.Encode([]byte{6}),
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
t.Run("fails to get finality checkpoints", func(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
generateFinalityCheckpointsResponse func() structs.GetFinalityCheckpointsResponse
|
||||
finalityCheckpointsError error
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "query failed",
|
||||
finalityCheckpointsError: errors.New("foo error"),
|
||||
expectedError: "foo error",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
return structs.GetFinalityCheckpointsResponse{}
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil finality checkpoints data",
|
||||
expectedError: "finality checkpoints data is nil",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data = nil
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil finalized checkpoint",
|
||||
expectedError: "finalized checkpoint is nil",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.Finalized = nil
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "invalid finalized epoch",
|
||||
expectedError: "failed to parse finalized epoch `foo`",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.Finalized.Epoch = "foo"
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "failed to get first slot of finalized epoch",
|
||||
expectedError: fmt.Sprintf("failed to get first slot for epoch `%d`", uint64(math.MaxUint64)),
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.Finalized.Epoch = strconv.FormatUint(uint64(math.MaxUint64), 10)
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "invalid finalized root",
|
||||
expectedError: "failed to decode finalized checkpoint root `bar`",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.Finalized.Root = "bar"
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil current justified checkpoint",
|
||||
expectedError: "current justified checkpoint is nil",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.CurrentJustified = nil
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "invalid current justified epoch",
|
||||
expectedError: "failed to parse current justified checkpoint epoch `foo`",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.CurrentJustified.Epoch = "foo"
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "failed to get first slot of current justified epoch",
|
||||
expectedError: fmt.Sprintf("failed to get first slot for epoch `%d`", uint64(math.MaxUint64)),
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.CurrentJustified.Epoch = strconv.FormatUint(uint64(math.MaxUint64), 10)
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "invalid current justified root",
|
||||
expectedError: "failed to decode current justified checkpoint root `bar`",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.CurrentJustified.Root = "bar"
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil previous justified checkpoint",
|
||||
expectedError: "previous justified checkpoint is nil",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.PreviousJustified = nil
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "invalid previous justified epoch",
|
||||
expectedError: "failed to parse previous justified checkpoint epoch `foo`",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.PreviousJustified.Epoch = "foo"
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "failed to get first slot of previous justified epoch",
|
||||
expectedError: fmt.Sprintf("failed to get first slot for epoch `%d`", uint64(math.MaxUint64)),
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.PreviousJustified.Epoch = strconv.FormatUint(uint64(math.MaxUint64), 10)
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "invalid previous justified root",
|
||||
expectedError: "failed to decode previous justified checkpoint root `bar`",
|
||||
generateFinalityCheckpointsResponse: func() structs.GetFinalityCheckpointsResponse {
|
||||
validResponse := generateValidFinalityCheckpointsResponse()
|
||||
validResponse.Data.PreviousJustified.Root = "bar"
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
finalityCheckpointsResponse := structs.GetFinalityCheckpointsResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(gomock.Any(), finalityCheckpointsEndpoint, &finalityCheckpointsResponse).Return(
|
||||
testCase.finalityCheckpointsError,
|
||||
).SetArg(
|
||||
2,
|
||||
testCase.generateFinalityCheckpointsResponse(),
|
||||
)
|
||||
|
||||
beaconChainClient := beaconApiChainClient{jsonRestHandler: jsonRestHandler}
|
||||
_, err := beaconChainClient.ChainHead(ctx, &emptypb.Empty{})
|
||||
assert.ErrorContains(t, testCase.expectedError, err)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
generateValidBlockHeadersResponse := func() structs.GetBlockHeaderResponse {
|
||||
return structs.GetBlockHeaderResponse{
|
||||
Data: &structs.SignedBeaconBlockHeaderContainer{
|
||||
Root: hexutil.Encode([]byte{7}),
|
||||
Header: &structs.SignedBeaconBlockHeader{
|
||||
Message: &structs.BeaconBlockHeader{
|
||||
Slot: "8",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
t.Run("fails to get head block headers", func(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
generateHeadBlockHeadersResponse func() structs.GetBlockHeaderResponse
|
||||
headBlockHeadersError error
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "query failed",
|
||||
headBlockHeadersError: errors.New("foo error"),
|
||||
expectedError: "failed to get head block header",
|
||||
generateHeadBlockHeadersResponse: func() structs.GetBlockHeaderResponse {
|
||||
return structs.GetBlockHeaderResponse{}
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil block header data",
|
||||
expectedError: "block header data is nil",
|
||||
generateHeadBlockHeadersResponse: func() structs.GetBlockHeaderResponse {
|
||||
validResponse := generateValidBlockHeadersResponse()
|
||||
validResponse.Data = nil
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil block header data header",
|
||||
expectedError: "block header data is nil",
|
||||
generateHeadBlockHeadersResponse: func() structs.GetBlockHeaderResponse {
|
||||
validResponse := generateValidBlockHeadersResponse()
|
||||
validResponse.Data.Header = nil
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil block header message",
|
||||
expectedError: "block header message is nil",
|
||||
generateHeadBlockHeadersResponse: func() structs.GetBlockHeaderResponse {
|
||||
validResponse := generateValidBlockHeadersResponse()
|
||||
validResponse.Data.Header.Message = nil
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "invalid message slot",
|
||||
expectedError: "failed to parse head block slot `foo`",
|
||||
generateHeadBlockHeadersResponse: func() structs.GetBlockHeaderResponse {
|
||||
validResponse := generateValidBlockHeadersResponse()
|
||||
validResponse.Data.Header.Message.Slot = "foo"
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "invalid root",
|
||||
expectedError: "failed to decode head block root `bar`",
|
||||
generateHeadBlockHeadersResponse: func() structs.GetBlockHeaderResponse {
|
||||
validResponse := generateValidBlockHeadersResponse()
|
||||
validResponse.Data.Root = "bar"
|
||||
return validResponse
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
finalityCheckpointsResponse := structs.GetFinalityCheckpointsResponse{}
|
||||
jsonRestHandler.EXPECT().Get(gomock.Any(), finalityCheckpointsEndpoint, &finalityCheckpointsResponse).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
generateValidFinalityCheckpointsResponse(),
|
||||
)
|
||||
|
||||
headBlockHeadersResponse := structs.GetBlockHeaderResponse{}
|
||||
jsonRestHandler.EXPECT().Get(gomock.Any(), headBlockHeadersEndpoint, &headBlockHeadersResponse).Return(
|
||||
testCase.headBlockHeadersError,
|
||||
).SetArg(
|
||||
2,
|
||||
testCase.generateHeadBlockHeadersResponse(),
|
||||
)
|
||||
|
||||
beaconChainClient := beaconApiChainClient{jsonRestHandler: jsonRestHandler}
|
||||
_, err := beaconChainClient.ChainHead(ctx, &emptypb.Empty{})
|
||||
assert.ErrorContains(t, testCase.expectedError, err)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("returns a valid chain head", func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
finalityCheckpointsResponse := structs.GetFinalityCheckpointsResponse{}
|
||||
jsonRestHandler.EXPECT().Get(gomock.Any(), finalityCheckpointsEndpoint, &finalityCheckpointsResponse).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
generateValidFinalityCheckpointsResponse(),
|
||||
)
|
||||
|
||||
headBlockHeadersResponse := structs.GetBlockHeaderResponse{}
|
||||
jsonRestHandler.EXPECT().Get(gomock.Any(), headBlockHeadersEndpoint, &headBlockHeadersResponse).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
generateValidBlockHeadersResponse(),
|
||||
)
|
||||
|
||||
expectedPreviousJustifiedSlot, err := slots.EpochStart(1)
|
||||
require.NoError(t, err)
|
||||
|
||||
expectedCurrentJustifiedSlot, err := slots.EpochStart(3)
|
||||
require.NoError(t, err)
|
||||
|
||||
expectedFinalizedSlot, err := slots.EpochStart(5)
|
||||
require.NoError(t, err)
|
||||
|
||||
expectedChainHead := ðpb.ChainHead{
|
||||
PreviousJustifiedEpoch: 1,
|
||||
PreviousJustifiedBlockRoot: []byte{2},
|
||||
PreviousJustifiedSlot: expectedPreviousJustifiedSlot,
|
||||
JustifiedEpoch: 3,
|
||||
JustifiedBlockRoot: []byte{4},
|
||||
JustifiedSlot: expectedCurrentJustifiedSlot,
|
||||
FinalizedEpoch: 5,
|
||||
FinalizedBlockRoot: []byte{6},
|
||||
FinalizedSlot: expectedFinalizedSlot,
|
||||
HeadBlockRoot: []byte{7},
|
||||
HeadSlot: 8,
|
||||
HeadEpoch: slots.ToEpoch(8),
|
||||
}
|
||||
|
||||
beaconChainClient := beaconApiChainClient{jsonRestHandler: jsonRestHandler}
|
||||
chainHead, err := beaconChainClient.ChainHead(ctx, &emptypb.Empty{})
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, expectedChainHead, chainHead)
|
||||
})
|
||||
}
|
||||
api/client/beacon/checkpoint.go (new file, 262 lines)
@@ -0,0 +1,262 @@
|
||||
package beacon
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"path"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
types "github.com/prysmaticlabs/eth2-types"
|
||||
"github.com/prysmaticlabs/prysm/beacon-chain/core/helpers"
|
||||
"github.com/prysmaticlabs/prysm/beacon-chain/state"
|
||||
"github.com/prysmaticlabs/prysm/encoding/ssz/detect"
|
||||
"github.com/prysmaticlabs/prysm/io/file"
|
||||
"github.com/prysmaticlabs/prysm/proto/prysm/v1alpha1/block"
|
||||
"github.com/prysmaticlabs/prysm/runtime/version"
|
||||
"github.com/prysmaticlabs/prysm/time/slots"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"golang.org/x/mod/semver"
|
||||
)
|
||||
|
||||
// OriginData represents the BeaconState and SignedBeaconBlock necessary to start an empty Beacon Node
|
||||
// using Checkpoint Sync.
|
||||
type OriginData struct {
|
||||
wsd *WeakSubjectivityData
|
||||
sb []byte
|
||||
bb []byte
|
||||
st state.BeaconState
|
||||
b block.SignedBeaconBlock
|
||||
cf *detect.VersionedUnmarshaler
|
||||
}
|
||||
|
||||
// CheckpointString returns the standard string representation of a Checkpoint for the block root and epoch for the
|
||||
// SignedBeaconBlock value found by DownloadOriginData.
|
||||
// The format is a hex-encoded block root, followed by the epoch of the block, separated by a colon. For example:
|
||||
// "0x1c35540cac127315fabb6bf29181f2ae0de1a3fc909d2e76ba771e61312cc49a:74888"
|
||||
func (od *OriginData) CheckpointString() string {
|
||||
return fmt.Sprintf("%#x:%d", od.wsd.BlockRoot, od.wsd.Epoch)
|
||||
}
|
||||
|
||||
// SaveBlock saves the downloaded block to a unique file in the given path.
|
||||
// For readability and collision avoidance, the file name includes: type, config name, slot and root
|
||||
func (od *OriginData) SaveBlock(dir string) (string, error) {
|
||||
blockPath := path.Join(dir, fname("state", od.cf, od.st.Slot(), od.wsd.BlockRoot))
|
||||
return blockPath, file.WriteFile(blockPath, od.sb)
|
||||
}
|
||||
|
||||
// SaveState saves the downloaded state to a unique file in the given path.
|
||||
// For readability and collision avoidance, the file name includes: type, config name, slot and root
|
||||
func (od *OriginData) SaveState(dir string) (string, error) {
|
||||
statePath := path.Join(dir, fname("state", od.cf, od.st.Slot(), od.wsd.StateRoot))
|
||||
return statePath, file.WriteFile(statePath, od.sb)
|
||||
}
|
||||
|
||||
// StateBytes returns the ssz-encoded bytes of the downloaded BeaconState value.
|
||||
func (od *OriginData) StateBytes() []byte {
|
||||
return od.sb
|
||||
}
|
||||
|
||||
// BlockBytes returns the ssz-encoded bytes of the downloaded SignedBeaconBlock value.
|
||||
func (od *OriginData) BlockBytes() []byte {
|
||||
return od.bb
|
||||
}
|
||||
|
||||
func fname(prefix string, cf *detect.VersionedUnmarshaler, slot types.Slot, root [32]byte) string {
|
||||
return fmt.Sprintf("%s_%s_%s_%d-%#x.ssz", prefix, cf.Config.ConfigName, version.String(cf.Fork), slot, root)
|
||||
}
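// For illustration (values hypothetical): fname("state", cf, 23, root) for a mainnet phase0
// unmarshaler would yield something like "state_mainnet_phase0_23-0x2323....ssz".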
|
||||
|
||||
// this method downloads the head state, which can be used to find the correct chain config
|
||||
// and use prysm's helper methods to compute the latest weak subjectivity epoch.
|
||||
func getWeakSubjectivityEpochFromHead(ctx context.Context, client *Client) (types.Epoch, error) {
|
||||
headBytes, err := client.GetState(ctx, IdHead)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cf, err := detect.FromState(headBytes)
|
||||
if err != nil {
|
||||
return 0, errors.Wrap(err, "error detecting chain config for beacon state")
|
||||
}
|
||||
log.Printf("detected supported config in remote head state, name=%s, fork=%s", cf.Config.ConfigName, version.String(cf.Fork))
|
||||
headState, err := cf.UnmarshalBeaconState(headBytes)
|
||||
if err != nil {
|
||||
return 0, errors.Wrap(err, "error unmarshaling state to correct version")
|
||||
}
|
||||
|
||||
epoch, err := helpers.LatestWeakSubjectivityEpoch(ctx, headState, cf.Config)
|
||||
if err != nil {
|
||||
return 0, errors.Wrap(err, "error computing the weak subjectivity epoch from head state")
|
||||
}
|
||||
|
||||
log.Printf("(computed client-side) weak subjectivity epoch = %d", epoch)
|
||||
return epoch, nil
|
||||
}
|
||||
|
||||
const (
|
||||
prysmMinimumVersion = "v2.0.7"
|
||||
prysmImplementationName = "Prysm"
|
||||
)
|
||||
|
||||
// ErrUnsupportedPrysmCheckpointVersion indicates remote beacon node can't be used for checkpoint retrieval.
|
||||
var ErrUnsupportedPrysmCheckpointVersion = errors.New("node does not meet minimum version requirements for checkpoint retrieval")
|
||||
|
||||
// for older endpoints or clients that do not support the weak_subjectivity api method
|
||||
// we gather the necessary data for a checkpoint sync by:
|
||||
// - inspecting the remote server's head state and computing the weak subjectivity epoch locally
|
||||
// - requesting the state at the first slot of the epoch
|
||||
// - using hash_tree_root(state.latest_block_header) to compute the root of the block that the state integrates
|
||||
// - requesting that block by its root
|
||||
func downloadBackwardsCompatible(ctx context.Context, client *Client) (*OriginData, error) {
|
||||
log.Print("falling back to generic checkpoint derivation, weak_subjectivity API not supported by server")
|
||||
nv, err := client.GetNodeVersion(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to proceed with fallback method without confirming node version")
|
||||
}
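// semver.Compare returns a negative value when nv.semver sorts before prysmMinimumVersion,
// so Prysm nodes older than v2.0.7 are rejected for this fallback path.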
|
||||
if nv.implementation == prysmImplementationName && semver.Compare(nv.semver, prysmMinimumVersion) < 0 {
|
||||
return nil, errors.Wrapf(ErrUnsupportedPrysmCheckpointVersion, "%s < minimum (%s)", nv.semver, prysmMinimumVersion)
|
||||
}
|
||||
epoch, err := getWeakSubjectivityEpochFromHead(ctx, client)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error computing weak subjectivity epoch via head state inspection")
|
||||
}
|
||||
|
||||
// use first slot of the epoch for the state slot
|
||||
slot, err := slots.EpochStart(epoch)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "error computing first slot of epoch=%d", epoch)
|
||||
}
|
||||
|
||||
log.Printf("requesting checkpoint state at slot %d", slot)
|
||||
// get the state at the first slot of the epoch
|
||||
stateBytes, err := client.GetState(ctx, IdFromSlot(slot))
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to request state by slot from api, slot=%d", slot)
|
||||
}
|
||||
|
||||
// ConfigFork is used to unmarshal the BeaconState so we can read the block root in latest_block_header
|
||||
cf, err := detect.FromState(stateBytes)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error detecting chain config for beacon state")
|
||||
}
|
||||
log.Printf("detected supported config in checkpoint state, name=%s, fork=%s", cf.Config.ConfigName, version.String(cf.Fork))
|
||||
|
||||
st, err := cf.UnmarshalBeaconState(stateBytes)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error using detected config fork to unmarshal state bytes")
|
||||
}
|
||||
|
||||
// compute state and block roots
|
||||
stateRoot, err := st.HashTreeRoot(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error computing hash_tree_root of state")
|
||||
}
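// The latest_block_header held in a BeaconState carries a zeroed state_root (it is only filled in
// when the following slot is processed), so we patch in the state's own hash_tree_root, which is
// exactly the block's state_root, before computing the block root the header resolves to.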
|
||||
|
||||
header := st.LatestBlockHeader()
|
||||
header.StateRoot = stateRoot[:]
|
||||
computedBlockRoot, err := header.HashTreeRoot()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error while computing block root using state data")
|
||||
}
|
||||
|
||||
blockBytes, err := client.GetBlock(ctx, IdFromRoot(computedBlockRoot))
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "error requesting block by root = %d", computedBlockRoot)
|
||||
}
|
||||
block, err := cf.UnmarshalBeaconBlock(blockBytes)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to unmarshal block to a supported type using the detected fork schedule")
|
||||
}
|
||||
blockRoot, err := block.Block().HashTreeRoot()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error computing hash_tree_root for block obtained via root")
|
||||
}
|
||||
|
||||
log.Printf("BeaconState slot=%d, Block slot=%d", st.Slot(), block.Block().Slot())
|
||||
log.Printf("BeaconState htr=%#xd, Block state_root=%#x", stateRoot, block.Block().StateRoot())
|
||||
log.Printf("BeaconBlock root computed from state=%#x, Block htr=%#x", computedBlockRoot, blockRoot)
|
||||
|
||||
return &OriginData{
|
||||
wsd: &WeakSubjectivityData{
|
||||
BlockRoot: blockRoot,
|
||||
StateRoot: stateRoot,
|
||||
Epoch: epoch,
|
||||
},
|
||||
st: st,
|
||||
sb: stateBytes,
|
||||
b: block,
|
||||
bb: blockBytes,
|
||||
cf: cf,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// DownloadOriginData attempts to use the proposed weak_subjectivity beacon node api
|
||||
// to obtain the weak_subjectivity metadata (epoch, block_root, state_root) needed to sync
|
||||
// a beacon node from the canonical weak subjectivity checkpoint. As this is a proposed API
|
||||
// that will only be supported by prysm at first; in the event of a 404 we fall back to using a
|
||||
// different technique where we first download the head state which can be used to compute the
|
||||
// weak subjectivity epoch on the client side.
|
||||
func DownloadOriginData(ctx context.Context, client *Client) (*OriginData, error) {
|
||||
ws, err := client.GetWeakSubjectivity(ctx)
|
||||
if err != nil {
|
||||
// a 404/405 is expected if querying an endpoint that doesn't support the weak subjectivity checkpoint api
|
||||
if !errors.Is(err, ErrNotOK) {
|
||||
return nil, errors.Wrap(err, "unexpected API response for prysm-only weak subjectivity checkpoint API")
|
||||
}
|
||||
// fall back to vanilla Beacon Node API method
|
||||
return downloadBackwardsCompatible(ctx, client)
|
||||
}
|
||||
log.Printf("server weak subjectivity checkpoint response - epoch=%d, block_root=%#x, state_root=%#x", ws.Epoch, ws.BlockRoot, ws.StateRoot)
|
||||
|
||||
// use first slot of the epoch for the block slot
|
||||
slot, err := slots.EpochStart(ws.Epoch)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "error computing first slot of epoch=%d", ws.Epoch)
|
||||
}
|
||||
log.Printf("requesting checkpoint state at slot %d", slot)
|
||||
|
||||
stateBytes, err := client.GetState(ctx, IdFromSlot(slot))
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to request state by slot from api, slot=%d", slot)
|
||||
}
|
||||
cf, err := detect.FromState(stateBytes)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error detecting chain config for beacon state")
|
||||
}
|
||||
log.Printf("detected supported config in checkpoint state, name=%s, fork=%s", cf.Config.ConfigName, version.String(cf.Fork))
|
||||
|
||||
state, err := cf.UnmarshalBeaconState(stateBytes)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error using detected config fork to unmarshal state bytes")
|
||||
}
|
||||
stateRoot, err := state.HashTreeRoot(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to compute htr for state at slot=%d", slot)
|
||||
}
|
||||
|
||||
blockRoot, err := state.LatestBlockHeader().HashTreeRoot()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error computing hash_tree_root of latest_block_header")
|
||||
}
|
||||
blockBytes, err := client.GetBlock(ctx, IdFromRoot(ws.BlockRoot))
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "error requesting block by slot = %d", slot)
|
||||
}
|
||||
block, err := cf.UnmarshalBeaconBlock(blockBytes)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to unmarshal block to a supported type using the detected fork schedule")
|
||||
}
|
||||
realBlockRoot, err := block.Block().HashTreeRoot()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error computing hash_tree_root of retrieved block")
|
||||
}
|
||||
log.Printf("BeaconState slot=%d, Block slot=%d", state.Slot(), block.Block().Slot())
|
||||
log.Printf("BeaconState htr=%#xd, Block state_root=%#x", stateRoot, block.Block().StateRoot())
|
||||
log.Printf("BeaconState latest_block_header htr=%#xd, block htr=%#x", blockRoot, realBlockRoot)
|
||||
return &OriginData{
|
||||
wsd: ws,
|
||||
st: state,
|
||||
b: block,
|
||||
sb: stateBytes,
|
||||
bb: blockBytes,
|
||||
cf: cf,
|
||||
}, nil
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package checkpoint
|
||||
package beacon
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
@@ -9,23 +9,36 @@ import (
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/proto/prysm/v1alpha1/wrapper"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/beacon-chain/state"
|
||||
"github.com/prysmaticlabs/prysm/network/forks"
|
||||
ethpb "github.com/prysmaticlabs/prysm/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/testing/util"
|
||||
"github.com/prysmaticlabs/prysm/time/slots"
|
||||
|
||||
types "github.com/prysmaticlabs/eth2-types"
|
||||
"github.com/prysmaticlabs/prysm/config/params"
|
||||
"github.com/prysmaticlabs/prysm/encoding/ssz/detect"
|
||||
"github.com/prysmaticlabs/prysm/runtime/version"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon"
|
||||
"github.com/prysmaticlabs/prysm/v5/beacon-chain/state"
|
||||
"github.com/prysmaticlabs/prysm/v5/config/params"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/blocks"
|
||||
blocktest "github.com/prysmaticlabs/prysm/v5/consensus-types/blocks/testing"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/ssz/detect"
|
||||
"github.com/prysmaticlabs/prysm/v5/network/forks"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/runtime/version"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/util"
|
||||
"github.com/prysmaticlabs/prysm/v5/time/slots"
|
||||
"github.com/prysmaticlabs/prysm/testing/require"
|
||||
)
|
||||
|
||||
type testRT struct {
|
||||
rt func(*http.Request) (*http.Response, error)
|
||||
}
|
||||
|
||||
func (rt *testRT) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
if rt.rt != nil {
|
||||
return rt.rt(req)
|
||||
}
|
||||
return nil, errors.New("RoundTripper not implemented")
|
||||
}
|
||||
|
||||
var _ http.RoundTripper = &testRT{}
|
||||
|
||||
func marshalToEnvelope(val interface{}) ([]byte, error) {
|
||||
raw, err := json.Marshal(val)
|
||||
if err != nil {
|
||||
@@ -51,12 +64,45 @@ func TestMarshalToEnvelope(t *testing.T) {
|
||||
require.Equal(t, expected, string(encoded))
|
||||
}
|
||||
|
||||
func TestFallbackVersionCheck(t *testing.T) {
|
||||
c := &Client{
|
||||
hc: &http.Client{},
|
||||
host: "localhost:3500",
|
||||
scheme: "http",
|
||||
}
|
||||
c.hc.Transport = &testRT{rt: func(req *http.Request) (*http.Response, error) {
|
||||
res := &http.Response{Request: req}
|
||||
switch req.URL.Path {
|
||||
case getNodeVersionPath:
|
||||
res.StatusCode = http.StatusOK
|
||||
b := bytes.NewBuffer(nil)
|
||||
d := struct {
|
||||
Version string `json:"version"`
|
||||
}{
|
||||
Version: "Prysm/v2.0.5 (linux amd64)",
|
||||
}
|
||||
encoded, err := marshalToEnvelope(d)
|
||||
require.NoError(t, err)
|
||||
b.Write(encoded)
|
||||
res.Body = io.NopCloser(b)
|
||||
case getWeakSubjectivityPath:
|
||||
res.StatusCode = http.StatusNotFound
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}}
|
||||
|
||||
ctx := context.Background()
|
||||
_, err := DownloadOriginData(ctx, c)
|
||||
require.ErrorIs(t, err, ErrUnsupportedPrysmCheckpointVersion)
|
||||
}
|
||||
|
||||
func TestFname(t *testing.T) {
|
||||
vu := &detect.VersionedUnmarshaler{
|
||||
Config: params.MainnetConfig(),
|
||||
Fork: version.Phase0,
|
||||
}
|
||||
slot := primitives.Slot(23)
|
||||
slot := types.Slot(23)
|
||||
prefix := "block"
|
||||
var root [32]byte
|
||||
copy(root[:], []byte{0x23, 0x23, 0x23})
|
||||
@@ -74,9 +120,9 @@ func TestFname(t *testing.T) {
|
||||
require.Equal(t, expected, actual)
|
||||
}
|
||||
|
||||
func TestDownloadWeakSubjectivityCheckpoint(t *testing.T) {
|
||||
func TestDownloadOriginData(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
cfg := params.MainnetConfig().Copy()
|
||||
cfg := params.MainnetConfig()
|
||||
|
||||
epoch := cfg.AltairForkEpoch - 1
|
||||
// set up checkpoint state, using the epoch that will be computed as the ws checkpoint state based on the head state
|
||||
@@ -85,18 +131,13 @@ func TestDownloadWeakSubjectivityCheckpoint(t *testing.T) {
|
||||
wst, err := util.NewBeaconState()
|
||||
require.NoError(t, err)
|
||||
fork, err := forkForEpoch(cfg, epoch)
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, wst.SetFork(fork))
|
||||
|
||||
// set up checkpoint block
|
||||
b, err := blocks.NewSignedBeaconBlock(util.NewBeaconBlock())
|
||||
require.NoError(t, err)
|
||||
b, err = blocktest.SetBlockParentRoot(b, cfg.ZeroHash)
|
||||
require.NoError(t, err)
|
||||
b, err = blocktest.SetBlockSlot(b, wSlot)
|
||||
require.NoError(t, err)
|
||||
b, err = blocktest.SetProposerIndex(b, 0)
|
||||
require.NoError(t, err)
|
||||
b, err := wrapper.WrappedSignedBeaconBlock(util.NewBeaconBlock())
|
||||
require.NoError(t, wrapper.SetBlockParentRoot(b, cfg.ZeroHash))
|
||||
require.NoError(t, wrapper.SetBlockSlot(b, wSlot))
|
||||
require.NoError(t, wrapper.SetProposerIndex(b, 0))
|
||||
|
||||
// set up state header pointing at checkpoint block - this is how the block is downloaded by root
|
||||
header, err := b.Header()
|
||||
@@ -110,8 +151,7 @@ func TestDownloadWeakSubjectivityCheckpoint(t *testing.T) {
|
||||
wRoot, err := wst.HashTreeRoot(ctx)
|
||||
require.NoError(t, err)
|
||||
|
||||
b, err = blocktest.SetBlockStateRoot(b, wRoot)
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, wrapper.SetBlockStateRoot(b, wRoot))
|
||||
serBlock, err := b.MarshalSSZ()
|
||||
require.NoError(t, err)
|
||||
bRoot, err := b.Block().HashTreeRoot()
|
||||
@@ -119,57 +159,65 @@ func TestDownloadWeakSubjectivityCheckpoint(t *testing.T) {
|
||||
|
||||
wsSerialized, err := wst.MarshalSSZ()
|
||||
require.NoError(t, err)
|
||||
expectedWSD := beacon.WeakSubjectivityData{
|
||||
expectedWSD := WeakSubjectivityData{
|
||||
BlockRoot: bRoot,
|
||||
StateRoot: wRoot,
|
||||
Epoch: epoch,
|
||||
}
|
||||
|
||||
trans := &testRT{rt: func(req *http.Request) (*http.Response, error) {
|
||||
res := &http.Response{Request: req}
|
||||
switch req.URL.Path {
|
||||
case beacon.GetWeakSubjectivityPath:
|
||||
res.StatusCode = http.StatusOK
|
||||
cp := struct {
|
||||
Epoch string `json:"epoch"`
|
||||
Root string `json:"root"`
|
||||
}{
|
||||
Epoch: fmt.Sprintf("%d", slots.ToEpoch(b.Block().Slot())),
|
||||
Root: fmt.Sprintf("%#x", bRoot),
|
||||
hc := &http.Client{
|
||||
Transport: &testRT{rt: func(req *http.Request) (*http.Response, error) {
|
||||
res := &http.Response{Request: req}
|
||||
switch req.URL.Path {
|
||||
case getWeakSubjectivityPath:
|
||||
res.StatusCode = http.StatusOK
|
||||
cp := struct {
|
||||
Epoch string `json:"epoch"`
|
||||
Root string `json:"root"`
|
||||
}{
|
||||
Epoch: fmt.Sprintf("%d", slots.ToEpoch(b.Block().Slot())),
|
||||
Root: fmt.Sprintf("%#x", bRoot),
|
||||
}
|
||||
wsr := struct {
|
||||
Checkpoint interface{} `json:"ws_checkpoint"`
|
||||
StateRoot string `json:"state_root"`
|
||||
}{
|
||||
Checkpoint: cp,
|
||||
StateRoot: fmt.Sprintf("%#x", wRoot),
|
||||
}
|
||||
rb, err := marshalToEnvelope(wsr)
|
||||
require.NoError(t, err)
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(rb))
|
||||
case renderGetStatePath(IdFromSlot(wSlot)):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(wsSerialized))
|
||||
case renderGetBlockPath(IdFromRoot(bRoot)):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(serBlock))
|
||||
}
|
||||
wsr := struct {
|
||||
Checkpoint interface{} `json:"ws_checkpoint"`
|
||||
StateRoot string `json:"state_root"`
|
||||
}{
|
||||
Checkpoint: cp,
|
||||
StateRoot: fmt.Sprintf("%#x", wRoot),
|
||||
}
|
||||
rb, err := marshalToEnvelope(wsr)
|
||||
require.NoError(t, err)
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(rb))
|
||||
case beacon.RenderGetStatePath(beacon.IdFromSlot(wSlot)):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(wsSerialized))
|
||||
case beacon.RenderGetBlockPath(beacon.IdFromRoot(bRoot)):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(serBlock))
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}}
|
||||
return res, nil
|
||||
}},
|
||||
}
|
||||
c := &Client{
|
||||
hc: hc,
|
||||
host: "localhost:3500",
|
||||
scheme: "http",
|
||||
}
|
||||
|
||||
c, err := beacon.NewClient("http://localhost:3500", client.WithRoundTripper(trans))
|
||||
od, err := DownloadOriginData(ctx, c)
|
||||
require.NoError(t, err)
|
||||
|
||||
wsd, err := ComputeWeakSubjectivityCheckpoint(ctx, c)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, expectedWSD.Epoch, wsd.Epoch)
|
||||
require.Equal(t, expectedWSD.StateRoot, wsd.StateRoot)
|
||||
require.Equal(t, expectedWSD.BlockRoot, wsd.BlockRoot)
|
||||
require.Equal(t, expectedWSD.Epoch, od.wsd.Epoch)
|
||||
require.Equal(t, expectedWSD.StateRoot, od.wsd.StateRoot)
|
||||
require.Equal(t, expectedWSD.BlockRoot, od.wsd.BlockRoot)
|
||||
require.DeepEqual(t, wsSerialized, od.sb)
|
||||
require.DeepEqual(t, serBlock, od.bb)
|
||||
require.DeepEqual(t, wst.Fork().CurrentVersion, od.cf.Version[:])
|
||||
require.DeepEqual(t, version.Phase0, od.cf.Fork)
|
||||
}
|
||||
|
||||
// runs computeBackwardsCompatible directly
|
||||
// and via ComputeWeakSubjectivityCheckpoint with a round tripper that triggers the backwards compatible code path
|
||||
// runs downloadBackwardsCompatible directly
|
||||
// and via DownloadOriginData with a round tripper that triggers the backwards compatible code path
|
||||
func TestDownloadBackwardsCompatibleCombined(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
cfg := params.MainnetConfig()
|
||||
@@ -184,18 +232,13 @@ func TestDownloadBackwardsCompatibleCombined(t *testing.T) {
|
||||
wst, err := util.NewBeaconState()
|
||||
require.NoError(t, err)
|
||||
fork, err := forkForEpoch(cfg, cfg.GenesisEpoch)
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, wst.SetFork(fork))
|
||||
|
||||
// set up checkpoint block
|
||||
b, err := blocks.NewSignedBeaconBlock(util.NewBeaconBlock())
|
||||
require.NoError(t, err)
|
||||
b, err = blocktest.SetBlockParentRoot(b, cfg.ZeroHash)
|
||||
require.NoError(t, err)
|
||||
b, err = blocktest.SetBlockSlot(b, wSlot)
|
||||
require.NoError(t, err)
|
||||
b, err = blocktest.SetProposerIndex(b, 0)
|
||||
require.NoError(t, err)
|
||||
b, err := wrapper.WrappedSignedBeaconBlock(util.NewBeaconBlock())
|
||||
require.NoError(t, wrapper.SetBlockParentRoot(b, cfg.ZeroHash))
|
||||
require.NoError(t, wrapper.SetBlockSlot(b, wSlot))
|
||||
require.NoError(t, wrapper.SetProposerIndex(b, 0))
|
||||
|
||||
// set up state header pointing at checkpoint block - this is how the block is downloaded by root
|
||||
header, err := b.Header()
|
||||
@@ -209,8 +252,7 @@ func TestDownloadBackwardsCompatibleCombined(t *testing.T) {
|
||||
wRoot, err := wst.HashTreeRoot(ctx)
|
||||
require.NoError(t, err)
|
||||
|
||||
b, err = blocktest.SetBlockStateRoot(b, wRoot)
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, wrapper.SetBlockStateRoot(b, wRoot))
|
||||
serBlock, err := b.MarshalSSZ()
|
||||
require.NoError(t, err)
|
||||
bRoot, err := b.Block().HashTreeRoot()
|
||||
@@ -219,68 +261,82 @@ func TestDownloadBackwardsCompatibleCombined(t *testing.T) {
|
||||
wsSerialized, err := wst.MarshalSSZ()
|
||||
require.NoError(t, err)
|
||||
|
||||
trans := &testRT{rt: func(req *http.Request) (*http.Response, error) {
|
||||
res := &http.Response{Request: req}
|
||||
switch req.URL.Path {
|
||||
case beacon.GetNodeVersionPath:
|
||||
res.StatusCode = http.StatusOK
|
||||
b := bytes.NewBuffer(nil)
|
||||
d := struct {
|
||||
Version string `json:"version"`
|
||||
}{
|
||||
Version: "Lighthouse/v0.1.5 (Linux x86_64)",
|
||||
hc := &http.Client{
|
||||
Transport: &testRT{rt: func(req *http.Request) (*http.Response, error) {
|
||||
res := &http.Response{Request: req}
|
||||
switch req.URL.Path {
|
||||
case getNodeVersionPath:
|
||||
res.StatusCode = http.StatusOK
|
||||
b := bytes.NewBuffer(nil)
|
||||
d := struct {
|
||||
Version string `json:"version"`
|
||||
}{
|
||||
Version: "Lighthouse/v0.1.5 (Linux x86_64)",
|
||||
}
|
||||
encoded, err := marshalToEnvelope(d)
|
||||
require.NoError(t, err)
|
||||
b.Write(encoded)
|
||||
res.Body = io.NopCloser(b)
|
||||
case getWeakSubjectivityPath:
|
||||
res.StatusCode = http.StatusNotFound
|
||||
case renderGetStatePath(IdHead):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(serialized))
|
||||
case renderGetStatePath(IdFromSlot(wSlot)):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(wsSerialized))
|
||||
case renderGetBlockPath(IdFromRoot(bRoot)):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(serBlock))
|
||||
}
|
||||
encoded, err := marshalToEnvelope(d)
|
||||
require.NoError(t, err)
|
||||
b.Write(encoded)
|
||||
res.Body = io.NopCloser(b)
|
||||
case beacon.GetWeakSubjectivityPath:
|
||||
res.StatusCode = http.StatusNotFound
|
||||
case beacon.RenderGetStatePath(beacon.IdHead):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(serialized))
|
||||
case beacon.RenderGetStatePath(beacon.IdFromSlot(wSlot)):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(wsSerialized))
|
||||
case beacon.RenderGetBlockPath(beacon.IdFromRoot(bRoot)):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(serBlock))
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}}
|
||||
return res, nil
|
||||
}},
|
||||
}
|
||||
c := &Client{
|
||||
hc: hc,
|
||||
host: "localhost:3500",
|
||||
scheme: "http",
|
||||
}
|
||||
|
||||
c, err := beacon.NewClient("http://localhost:3500", client.WithRoundTripper(trans))
|
||||
odPub, err := DownloadOriginData(ctx, c)
|
||||
require.NoError(t, err)
|
||||
|
||||
wsPub, err := ComputeWeakSubjectivityCheckpoint(ctx, c)
|
||||
odPriv, err := downloadBackwardsCompatible(ctx, c)
|
||||
require.NoError(t, err)
|
||||
|
||||
wsPriv, err := computeBackwardsCompatible(ctx, c)
|
||||
require.NoError(t, err)
|
||||
require.DeepEqual(t, wsPriv, wsPub)
|
||||
require.DeepEqual(t, odPriv.wsd, odPub.wsd)
|
||||
require.DeepEqual(t, odPriv.sb, odPub.sb)
|
||||
require.DeepEqual(t, odPriv.bb, odPub.bb)
|
||||
require.DeepEqual(t, odPriv.cf.Fork, odPub.cf.Fork)
|
||||
require.DeepEqual(t, odPriv.cf.Version, odPub.cf.Version)
|
||||
}
|
||||
|
||||
func TestGetWeakSubjectivityEpochFromHead(t *testing.T) {
|
||||
st, expectedEpoch := defaultTestHeadState(t, params.MainnetConfig())
|
||||
serialized, err := st.MarshalSSZ()
|
||||
require.NoError(t, err)
|
||||
trans := &testRT{rt: func(req *http.Request) (*http.Response, error) {
|
||||
res := &http.Response{Request: req}
|
||||
if req.URL.Path == beacon.RenderGetStatePath(beacon.IdHead) {
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(serialized))
|
||||
}
|
||||
return res, nil
|
||||
}}
|
||||
c, err := beacon.NewClient("http://localhost:3500", client.WithRoundTripper(trans))
|
||||
require.NoError(t, err)
|
||||
hc := &http.Client{
|
||||
Transport: &testRT{rt: func(req *http.Request) (*http.Response, error) {
|
||||
res := &http.Response{Request: req}
|
||||
switch req.URL.Path {
|
||||
case renderGetStatePath(IdHead):
|
||||
res.StatusCode = http.StatusOK
|
||||
res.Body = io.NopCloser(bytes.NewBuffer(serialized))
|
||||
}
|
||||
return res, nil
|
||||
}},
|
||||
}
|
||||
c := &Client{
|
||||
hc: hc,
|
||||
host: "localhost:3500",
|
||||
scheme: "http",
|
||||
}
|
||||
actualEpoch, err := getWeakSubjectivityEpochFromHead(context.Background(), c)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, expectedEpoch, actualEpoch)
|
||||
}
|
||||
|
||||
func forkForEpoch(cfg *params.BeaconChainConfig, epoch primitives.Epoch) (*ethpb.Fork, error) {
|
||||
func forkForEpoch(cfg *params.BeaconChainConfig, epoch types.Epoch) (*ethpb.Fork, error) {
|
||||
os := forks.NewOrderedSchedule(cfg)
|
||||
currentVersion, err := os.VersionForEpoch(epoch)
|
||||
if err != nil {
|
||||
@@ -302,7 +358,7 @@ func forkForEpoch(cfg *params.BeaconChainConfig, epoch primitives.Epoch) (*ethpb
|
||||
}, nil
|
||||
}
|
||||
|
||||
func defaultTestHeadState(t *testing.T, cfg *params.BeaconChainConfig) (state.BeaconState, primitives.Epoch) {
|
||||
func defaultTestHeadState(t *testing.T, cfg *params.BeaconChainConfig) (state.BeaconState, types.Epoch) {
|
||||
st, err := util.NewBeaconStateAltair()
|
||||
require.NoError(t, err)
|
||||
|
||||
@@ -343,5 +399,9 @@ func populateValidators(cfg *params.BeaconChainConfig, st state.BeaconState, val
|
||||
if err := st.SetValidators(validators); err != nil {
|
||||
return err
|
||||
}
|
||||
return st.SetBalances(balances)
|
||||
if err := st.SetBalances(balances); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -5,36 +5,37 @@ import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/network/forks"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/bytesutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/network/forks"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/sirupsen/logrus"
|
||||
types "github.com/prysmaticlabs/eth2-types"
|
||||
"github.com/prysmaticlabs/prysm/beacon-chain/rpc/apimiddleware"
|
||||
"github.com/prysmaticlabs/prysm/encoding/bytesutil"
|
||||
ethpb "github.com/prysmaticlabs/prysm/proto/prysm/v1alpha1"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
const (
|
||||
getSignedBlockPath = "/eth/v2/beacon/blocks"
|
||||
getBlockRootPath = "/eth/v1/beacon/blocks/{{.Id}}/root"
|
||||
getForkForStatePath = "/eth/v1/beacon/states/{{.Id}}/fork"
|
||||
getForkSchedulePath = "/eth/v1/config/fork_schedule"
|
||||
getConfigSpecPath = "/eth/v1/config/spec"
|
||||
getStatePath = "/eth/v2/debug/beacon/states"
|
||||
changeBLStoExecutionPath = "/eth/v1/beacon/pool/bls_to_execution_changes"
|
||||
|
||||
GetNodeVersionPath = "/eth/v1/node/version"
|
||||
GetWeakSubjectivityPath = "/prysm/v1/beacon/weak_subjectivity"
|
||||
getSignedBlockPath = "/eth/v2/beacon/blocks"
|
||||
getBlockRootPath = "/eth/v1/beacon/blocks/{{.Id}}/root"
|
||||
getForkForStatePath = "/eth/v1/beacon/states/{{.Id}}/fork"
|
||||
getWeakSubjectivityPath = "/eth/v1/beacon/weak_subjectivity"
|
||||
getForkSchedulePath = "/eth/v1/config/fork_schedule"
|
||||
getStatePath = "/eth/v2/debug/beacon/states"
|
||||
getNodeVersionPath = "/eth/v1/node/version"
|
||||
)
|
||||
|
||||
// StateOrBlockId represents the block_id / state_id parameters that several of the Eth Beacon API methods accept.
|
||||
@@ -46,9 +47,10 @@ const (
|
||||
type StateOrBlockId string
|
||||
|
||||
const (
|
||||
IdFinalized StateOrBlockId = "finalized"
|
||||
IdGenesis StateOrBlockId = "genesis"
|
||||
IdHead StateOrBlockId = "head"
|
||||
IdFinalized StateOrBlockId = "finalized"
|
||||
IdJustified StateOrBlockId = "justified"
|
||||
)
|
||||
|
||||
// IdFromRoot encodes a block root in the format expected by the API in places where a root can be used to identify
|
||||
@@ -57,29 +59,126 @@ func IdFromRoot(r [32]byte) StateOrBlockId {
|
||||
return StateOrBlockId(fmt.Sprintf("%#x", r))
|
||||
}
|
||||
|
||||
// IdFromSlot encodes a Slot in the format expected by the API in places where a slot can be used to identify
|
||||
// IdFromRoot encodes a Slot in the format expected by the API in places where a slot can be used to identify
|
||||
// a BeaconState or SignedBeaconBlock.
|
||||
func IdFromSlot(s primitives.Slot) StateOrBlockId {
|
||||
func IdFromSlot(s types.Slot) StateOrBlockId {
|
||||
return StateOrBlockId(strconv.FormatUint(uint64(s), 10))
|
||||
}
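// For example, IdFromSlot(23) renders as "23" and IdFromRoot(root) renders as a 0x-prefixed hex
// string; both forms are usable anywhere a state_id or block_id is accepted.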
|
||||
|
||||
// RenderGetBlockPath formats a block id into a path for the GetBlock API endpoint.
|
||||
func RenderGetBlockPath(id StateOrBlockId) string {
|
||||
return path.Join(getSignedBlockPath, string(id))
|
||||
// idTemplate is used to create template functions that can interpolate StateOrBlockId values.
|
||||
func idTemplate(ts string) func(StateOrBlockId) string {
|
||||
t := template.Must(template.New("").Parse(ts))
|
||||
f := func(id StateOrBlockId) string {
|
||||
b := bytes.NewBuffer(nil)
|
||||
err := t.Execute(b, struct{ Id string }{Id: string(id)})
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("invalid idTemplate: %s", ts))
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
// run the template once to ensure that it is valid
// this should happen at load time (using package-scoped vars) so that runtime errors aren't possible
|
||||
_ = f(IdGenesis)
|
||||
return f
|
||||
}
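// For example, idTemplate(getBlockRootPath)(IdHead) renders "/eth/v1/beacon/blocks/head/root".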
|
||||
|
||||
// ClientOpt is a functional option for the Client type (http.Client wrapper)
|
||||
type ClientOpt func(*Client)
|
||||
|
||||
// WithTimeout sets the .Timeout attribute of the wrapped http.Client.
|
||||
func WithTimeout(timeout time.Duration) ClientOpt {
|
||||
return func(c *Client) {
|
||||
c.hc.Timeout = timeout
|
||||
}
|
||||
}
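// A minimal usage sketch: NewClient("localhost:3500", WithTimeout(30*time.Second)) returns a
// client whose underlying http.Client gives up after 30 seconds.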
|
||||
|
||||
// Client provides a collection of helper methods for calling the Eth Beacon Node API endpoints.
|
||||
type Client struct {
|
||||
*client.Client
|
||||
hc *http.Client
|
||||
host string
|
||||
scheme string
|
||||
}
|
||||
|
||||
// NewClient returns a new Client that includes functions for rest calls to Beacon API.
|
||||
func NewClient(host string, opts ...client.ClientOpt) (*Client, error) {
|
||||
c, err := client.NewClient(host, opts...)
|
||||
// NewClient constructs a new client with the provided options (ex WithTimeout).
|
||||
// `host` is the base host + port used to construct request urls. This value can be
|
||||
// a URL string, or NewClient will assume an http endpoint if just `host:port` is used.
|
||||
func NewClient(host string, opts ...ClientOpt) (*Client, error) {
|
||||
host, err := validHostname(host)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &Client{c}, nil
|
||||
c := &Client{
|
||||
hc: &http.Client{},
|
||||
scheme: "http",
|
||||
host: host,
|
||||
}
|
||||
for _, o := range opts {
|
||||
o(c)
|
||||
}
|
||||
return c, nil
|
||||
}
|
||||
|
||||
func validHostname(h string) (string, error) {
|
||||
// try to parse as url (being permissive)
|
||||
u, err := url.Parse(h)
|
||||
if err == nil && u.Host != "" {
|
||||
return u.Host, nil
|
||||
}
|
||||
// try to parse as host:port
|
||||
host, port, err := net.SplitHostPort(h)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return fmt.Sprintf("%s:%s", host, port), nil
|
||||
}
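// Examples: validHostname("http://localhost:3500") and validHostname("localhost:3500") both
// resolve to "localhost:3500"; a value with neither a scheme nor a port is rejected.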
|
||||
|
||||
func (c *Client) urlForPath(methodPath string) *url.URL {
|
||||
u := &url.URL{
|
||||
Scheme: c.scheme,
|
||||
Host: c.host,
|
||||
}
|
||||
u.Path = path.Join(u.Path, methodPath)
|
||||
return u
|
||||
}
|
||||
|
||||
type reqOption func(*http.Request)
|
||||
|
||||
func withSSZEncoding() reqOption {
|
||||
return func(req *http.Request) {
|
||||
req.Header.Set("Accept", "application/octet-stream")
|
||||
}
|
||||
}
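// Setting Accept to application/octet-stream asks the node to reply with raw SSZ bytes rather
// than the default JSON envelope, which is what GetState and GetBlock expect.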
|
||||
|
||||
// get is a generic, opinionated GET function to reduce boilerplate amongst the getters in this package.
|
||||
func (c *Client) get(ctx context.Context, path string, opts ...reqOption) ([]byte, error) {
|
||||
u := c.urlForPath(path)
|
||||
log.Printf("requesting %s", u.String())
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, o := range opts {
|
||||
o(req)
|
||||
}
|
||||
r, err := c.hc.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
err = r.Body.Close()
|
||||
}()
|
||||
if r.StatusCode != http.StatusOK {
|
||||
return nil, non200Err(r)
|
||||
}
|
||||
b, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error reading http response body from GetBlock")
|
||||
}
|
||||
return b, nil
|
||||
}
|
||||
|
||||
func renderGetBlockPath(id StateOrBlockId) string {
|
||||
return path.Join(getSignedBlockPath, string(id))
|
||||
}
|
||||
|
||||
// GetBlock retrieves the SignedBeaconBlock for the given block id.
|
||||
@@ -88,21 +187,23 @@ func NewClient(host string, opts ...client.ClientOpt) (*Client, error) {
|
||||
// for the named identifiers.
|
||||
// The return value contains the ssz-encoded bytes.
|
||||
func (c *Client) GetBlock(ctx context.Context, blockId StateOrBlockId) ([]byte, error) {
|
||||
blockPath := RenderGetBlockPath(blockId)
|
||||
b, err := c.Get(ctx, blockPath, client.WithSSZEncoding())
|
||||
blockPath := renderGetBlockPath(blockId)
|
||||
b, err := c.get(ctx, blockPath, withSSZEncoding())
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "error requesting state by id = %s", blockId)
|
||||
}
|
||||
return b, nil
|
||||
}
|
||||
|
||||
var getBlockRootTpl = idTemplate(getBlockRootPath)
|
||||
|
||||
// GetBlockRoot retrieves the hash_tree_root of the BeaconBlock for the given block id.
|
||||
// Block identifier can be one of: "head" (canonical head in node's view), "genesis", "finalized",
|
||||
// <slot>, <hex encoded blockRoot with 0x prefix>. Variables of type StateOrBlockId are exported by this package
|
||||
// for the named identifiers.
|
||||
func (c *Client) GetBlockRoot(ctx context.Context, blockId StateOrBlockId) ([32]byte, error) {
|
||||
rootPath := getBlockRootTpl(blockId)
|
||||
b, err := c.Get(ctx, rootPath)
|
||||
b, err := c.get(ctx, rootPath)
|
||||
if err != nil {
|
||||
return [32]byte{}, errors.Wrapf(err, "error requesting block root by id = %s", blockId)
|
||||
}
|
||||
@@ -118,28 +219,30 @@ func (c *Client) GetBlockRoot(ctx context.Context, blockId StateOrBlockId) ([32]
|
||||
return bytesutil.ToBytes32(rs), nil
|
||||
}
|
||||
|
||||
var getForkTpl = idTemplate(getForkForStatePath)
|
||||
|
||||
// GetFork queries the Beacon Node API for the Fork from the state identified by stateId.
|
||||
// Block identifier can be one of: "head" (canonical head in node's view), "genesis", "finalized",
|
||||
// <slot>, <hex encoded blockRoot with 0x prefix>. Variables of type StateOrBlockId are exported by this package
|
||||
// for the named identifiers.
|
||||
func (c *Client) GetFork(ctx context.Context, stateId StateOrBlockId) (*ethpb.Fork, error) {
|
||||
body, err := c.Get(ctx, getForkTpl(stateId))
|
||||
body, err := c.get(ctx, getForkTpl(stateId))
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "error requesting fork by state id = %s", stateId)
|
||||
}
|
||||
fr := &structs.Fork{}
|
||||
dataWrapper := &struct{ Data *structs.Fork }{Data: fr}
|
||||
fr := &forkResponse{}
|
||||
dataWrapper := &struct{ Data *forkResponse }{Data: fr}
|
||||
err = json.Unmarshal(body, dataWrapper)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error decoding json response in GetFork")
|
||||
}
|
||||
|
||||
return fr.ToConsensus()
|
||||
return fr.Fork()
|
||||
}
|
||||
|
||||
// GetForkSchedule retrieves all forks, past, present and future, of which this node is aware.
|
||||
func (c *Client) GetForkSchedule(ctx context.Context) (forks.OrderedSchedule, error) {
|
||||
body, err := c.Get(ctx, getForkSchedulePath)
|
||||
body, err := c.get(ctx, getForkSchedulePath)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error requesting fork schedule")
|
||||
}
|
||||
@@ -155,36 +258,18 @@ func (c *Client) GetForkSchedule(ctx context.Context) (forks.OrderedSchedule, er
|
||||
return ofs, nil
|
||||
}
|
||||
|
||||
// GetConfigSpec retrieves the current configuration of the network used by the beacon node.
|
||||
func (c *Client) GetConfigSpec(ctx context.Context) (*structs.GetSpecResponse, error) {
|
||||
body, err := c.Get(ctx, getConfigSpecPath)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error requesting configSpecPath")
|
||||
}
|
||||
fsr := &structs.GetSpecResponse{}
|
||||
err = json.Unmarshal(body, fsr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return fsr, nil
|
||||
}
|
||||
|
||||
type NodeVersion struct {
|
||||
implementation string
|
||||
semver string
|
||||
systemInfo string
|
||||
}
|
||||
|
||||
func (nv *NodeVersion) SetImplementation(impl string) {
|
||||
nv.implementation = impl
|
||||
}
|
||||
|
||||
var versionRE = regexp.MustCompile(`^(\w+)/(v\d+\.\d+\.\d+[-a-zA-Z0-9]*)\s*/?(.*)$`)
|
||||
var versionRE = regexp.MustCompile(`^(\w+)\/(v\d+\.\d+\.\d+) \((.*)\)$`)
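// For example, "Prysm/v2.0.5 (linux amd64)" parses to implementation=Prysm, semver=v2.0.5,
// systemInfo=linux amd64.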
|
||||
|
||||
func parseNodeVersion(v string) (*NodeVersion, error) {
|
||||
groups := versionRE.FindStringSubmatch(v)
|
||||
if len(groups) != 4 {
|
||||
return nil, errors.Wrapf(client.ErrInvalidNodeVersion, "could not be parsed: %s", v)
|
||||
return nil, errors.Wrapf(ErrInvalidNodeVersion, "could not be parsed: %s", v)
|
||||
}
|
||||
return &NodeVersion{
|
||||
implementation: groups[1],
|
||||
@@ -196,7 +281,7 @@ func parseNodeVersion(v string) (*NodeVersion, error) {
|
||||
// GetNodeVersion requests that the beacon node identify information about its implementation in a format
|
||||
// similar to a HTTP User-Agent field. ex: Lighthouse/v0.1.5 (Linux x86_64)
|
||||
func (c *Client) GetNodeVersion(ctx context.Context) (*NodeVersion, error) {
|
||||
b, err := c.Get(ctx, GetNodeVersionPath)
|
||||
b, err := c.get(ctx, getNodeVersionPath)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error requesting node version")
|
||||
}
|
||||
@@ -212,8 +297,7 @@ func (c *Client) GetNodeVersion(ctx context.Context) (*NodeVersion, error) {
|
||||
return parseNodeVersion(d.Data.Version)
|
||||
}
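A small, self-contained sketch (not from the diff) of what the simpler of the two version regexes above extracts from the User-Agent style string quoted in the GetNodeVersion comment; the pattern is copied verbatim, everything else is illustrative.

	package main

	import (
		"fmt"
		"regexp"
	)

	// Copied from the diff above: expects "Implementation/vX.Y.Z (system info)".
	var versionRE = regexp.MustCompile(`^(\w+)\/(v\d+\.\d+\.\d+) \((.*)\)$`)

	func main() {
		groups := versionRE.FindStringSubmatch("Lighthouse/v0.1.5 (Linux x86_64)")
		// len(groups) == 4: groups[1]="Lighthouse", groups[2]="v0.1.5", groups[3]="Linux x86_64"
		fmt.Println(len(groups), groups[1], groups[2], groups[3])
	}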

// RenderGetStatePath formats a state id into a path for the GetState API endpoint.
func RenderGetStatePath(id StateOrBlockId) string {
func renderGetStatePath(id StateOrBlockId) string {
	return path.Join(getStatePath, string(id))
}

@@ -224,49 +308,33 @@ func RenderGetStatePath(id StateOrBlockId) string {
// The return value contains the ssz-encoded bytes.
func (c *Client) GetState(ctx context.Context, stateId StateOrBlockId) ([]byte, error) {
	statePath := path.Join(getStatePath, string(stateId))
	b, err := c.Get(ctx, statePath, client.WithSSZEncoding())
	b, err := c.get(ctx, statePath, withSSZEncoding())
	if err != nil {
		return nil, errors.Wrapf(err, "error requesting state by id = %s", stateId)
	}
	return b, nil
}

// WeakSubjectivityData represents the state root, block root and epoch of the BeaconState + ReadOnlySignedBeaconBlock
// that falls at the beginning of the current weak subjectivity period. These values can be used to construct
// a weak subjectivity checkpoint beacon node flag to be used for validation.
type WeakSubjectivityData struct {
	BlockRoot [32]byte
	StateRoot [32]byte
	Epoch     primitives.Epoch
}

// CheckpointString returns the standard string representation of a Checkpoint.
// The format is a hex-encoded block root, followed by the epoch of the block, separated by a colon. For example:
// "0x1c35540cac127315fabb6bf29181f2ae0de1a3fc909d2e76ba771e61312cc49a:74888"
func (wsd *WeakSubjectivityData) CheckpointString() string {
	return fmt.Sprintf("%#x:%d", wsd.BlockRoot, wsd.Epoch)
}

// GetWeakSubjectivity calls a proposed API endpoint that is unique to prysm
// This api method does the following:
// - computes weak subjectivity epoch
// - finds the highest non-skipped block preceding the epoch
// - returns the htr of the found block and returns this + the value of state_root from the block
func (c *Client) GetWeakSubjectivity(ctx context.Context) (*WeakSubjectivityData, error) {
	body, err := c.Get(ctx, GetWeakSubjectivityPath)
	body, err := c.get(ctx, getWeakSubjectivityPath)
	if err != nil {
		return nil, err
	}
	v := &structs.GetWeakSubjectivityResponse{}
	v := &apimiddleware.WeakSubjectivityResponse{}
	err = json.Unmarshal(body, v)
	if err != nil {
		return nil, err
	}
	epoch, err := strconv.ParseUint(v.Data.WsCheckpoint.Epoch, 10, 64)
	epoch, err := strconv.ParseUint(v.Data.Checkpoint.Epoch, 10, 64)
	if err != nil {
		return nil, err
	}
	blockRoot, err := hexutil.Decode(v.Data.WsCheckpoint.Root)
	blockRoot, err := hexutil.Decode(v.Data.Checkpoint.Root)
	if err != nil {
		return nil, err
	}
@@ -275,76 +343,81 @@ func (c *Client) GetWeakSubjectivity(ctx context.Context) (*WeakSubjectivityData
		return nil, err
	}
	return &WeakSubjectivityData{
		Epoch:     primitives.Epoch(epoch),
		Epoch:     types.Epoch(epoch),
		BlockRoot: bytesutil.ToBytes32(blockRoot),
		StateRoot: bytesutil.ToBytes32(stateRoot),
	}, nil
}
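A hypothetical helper, written as if it lived alongside the client code above, showing how GetWeakSubjectivity and CheckpointString compose; the helper name and the flag it would feed are assumptions, not part of this change.

	// checkpointFlagValue is illustrative only; it assumes a *Client from NewClient
	// and the standard context package imported as usual.
	func checkpointFlagValue(ctx context.Context, cl *Client) (string, error) {
		wsd, err := cl.GetWeakSubjectivity(ctx)
		if err != nil {
			return "", err
		}
		// Renders as "<hex block root>:<epoch>", the format a weak subjectivity
		// checkpoint flag on a syncing beacon node expects.
		return wsd.CheckpointString(), nil
	}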
|
||||
|
||||
// SubmitChangeBLStoExecution calls a beacon API endpoint to set the withdrawal addresses based on the given signed messages.
|
||||
// If the API responds with something other than OK there will be failure messages associated to the corresponding request message.
|
||||
func (c *Client) SubmitChangeBLStoExecution(ctx context.Context, request []*structs.SignedBLSToExecutionChange) error {
|
||||
u := c.BaseURL().ResolveReference(&url.URL{Path: changeBLStoExecutionPath})
|
||||
body, err := json.Marshal(request)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "failed to marshal JSON")
|
||||
}
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, u.String(), bytes.NewBuffer(body))
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "invalid format, failed to create new POST request object")
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
resp, err := c.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
err = resp.Body.Close()
|
||||
}()
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
decoder := json.NewDecoder(resp.Body)
|
||||
decoder.DisallowUnknownFields()
|
||||
errorJson := &server.IndexedVerificationFailureError{}
|
||||
if err := decoder.Decode(errorJson); err != nil {
|
||||
return errors.Wrapf(err, "failed to decode error JSON for %s", resp.Request.URL)
|
||||
}
|
||||
for _, failure := range errorJson.Failures {
|
||||
w := request[failure.Index].Message
|
||||
log.WithFields(logrus.Fields{
|
||||
"validatorIndex": w.ValidatorIndex,
|
||||
"withdrawalAddress": w.ToExecutionAddress,
|
||||
}).Error(failure.Message)
|
||||
}
|
||||
return errors.Errorf("POST error %d: %s", errorJson.Code, errorJson.Message)
|
||||
}
|
||||
return nil
|
||||
// WeakSubjectivityData represents the state root, block root and epoch of the BeaconState + SignedBeaconBlock
|
||||
// that falls at the beginning of the current weak subjectivity period. These values can be used to construct
|
||||
// a weak subjectivity checkpoint, or to download a BeaconState+SignedBeaconBlock pair that can be used to bootstrap
|
||||
// a new Beacon Node using Checkpoint Sync.
|
||||
type WeakSubjectivityData struct {
|
||||
BlockRoot [32]byte
|
||||
StateRoot [32]byte
|
||||
Epoch types.Epoch
|
||||
}
|
||||
|
||||
// GetBLStoExecutionChanges gets all the set withdrawal messages in the node's operation pool.
|
||||
// Returns a struct representation of json response.
|
||||
func (c *Client) GetBLStoExecutionChanges(ctx context.Context) (*structs.BLSToExecutionChangesPoolResponse, error) {
|
||||
body, err := c.Get(ctx, changeBLStoExecutionPath)
|
||||
func non200Err(response *http.Response) error {
|
||||
bodyBytes, err := ioutil.ReadAll(response.Body)
|
||||
var body string
|
||||
if err != nil {
|
||||
body = "(Unable to read response body.)"
|
||||
} else {
|
||||
body = "response body:\n" + string(bodyBytes)
|
||||
}
|
||||
msg := fmt.Sprintf("code=%d, url=%s, body=%s", response.StatusCode, response.Request.URL, body)
|
||||
switch response.StatusCode {
|
||||
case 404:
|
||||
return errors.Wrap(ErrNotFound, msg)
|
||||
default:
|
||||
return errors.Wrap(ErrNotOK, msg)
|
||||
}
|
||||
}
|
||||
|
||||
type forkResponse struct {
|
||||
PreviousVersion string `json:"previous_version"`
|
||||
CurrentVersion string `json:"current_version"`
|
||||
Epoch string `json:"epoch"`
|
||||
}
|
||||
|
||||
func (f *forkResponse) Fork() (*ethpb.Fork, error) {
|
||||
epoch, err := strconv.ParseUint(f.Epoch, 10, 64)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
poolResponse := &structs.BLSToExecutionChangesPoolResponse{}
|
||||
err = json.Unmarshal(body, poolResponse)
|
||||
cSlice, err := hexutil.Decode(f.CurrentVersion)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return poolResponse, nil
|
||||
if len(cSlice) != 4 {
|
||||
return nil, fmt.Errorf("got %d byte version for CurrentVersion, expected 4 bytes. hex=%s", len(cSlice), f.CurrentVersion)
|
||||
}
|
||||
pSlice, err := hexutil.Decode(f.PreviousVersion)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(pSlice) != 4 {
|
||||
return nil, fmt.Errorf("got %d byte version, expected 4 bytes. version hex=%s", len(pSlice), f.PreviousVersion)
|
||||
}
|
||||
return ðpb.Fork{
|
||||
CurrentVersion: cSlice,
|
||||
PreviousVersion: pSlice,
|
||||
Epoch: types.Epoch(epoch),
|
||||
}, nil
|
||||
}
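A self-contained sketch (not from the diff) of the data-wrapped wire shape the forkResponse type above decodes; the struct is mirrored locally and the field values are made up.

	package main

	import (
		"encoding/json"
		"fmt"
	)

	// Local mirror of the forkResponse shown above, for illustration only.
	type forkResponse struct {
		PreviousVersion string `json:"previous_version"`
		CurrentVersion  string `json:"current_version"`
		Epoch           string `json:"epoch"`
	}

	func main() {
		// Hypothetical fork response body; values are illustrative.
		body := []byte(`{"data":{"previous_version":"0x00000000","current_version":"0x01000000","epoch":"74240"}}`)
		fr := &forkResponse{}
		dataWrapper := &struct{ Data *forkResponse }{Data: fr}
		if err := json.Unmarshal(body, dataWrapper); err != nil {
			panic(err)
		}
		fmt.Println(fr.CurrentVersion, fr.Epoch) // 0x01000000 74240
	}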
|
||||
|
||||
type forkScheduleResponse struct {
|
||||
Data []structs.Fork
|
||||
Data []forkResponse
|
||||
}
|
||||
|
||||
func (fsr *forkScheduleResponse) OrderedForkSchedule() (forks.OrderedSchedule, error) {
|
||||
ofs := make(forks.OrderedSchedule, 0)
|
||||
for _, d := range fsr.Data {
|
||||
epoch, err := strconv.ParseUint(d.Epoch, 10, 64)
|
||||
epoch, err := strconv.Atoi(d.Epoch)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "error parsing epoch %s", d.Epoch)
|
||||
return nil, err
|
||||
}
|
||||
vSlice, err := hexutil.Decode(d.CurrentVersion)
|
||||
if err != nil {
|
||||
@@ -356,7 +429,7 @@ func (fsr *forkScheduleResponse) OrderedForkSchedule() (forks.OrderedSchedule, e
|
||||
version := bytesutil.ToBytes4(vSlice)
|
||||
ofs = append(ofs, forks.ForkScheduleEntry{
|
||||
Version: version,
|
||||
Epoch: primitives.Epoch(epoch),
|
||||
Epoch: types.Epoch(uint64(epoch)),
|
||||
})
|
||||
}
|
||||
sort.Sort(ofs)
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
package beacon
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/testing/require"
|
||||
)
|
||||
|
||||
func TestParseNodeVersion(t *testing.T) {
|
||||
@@ -18,17 +16,22 @@ func TestParseNodeVersion(t *testing.T) {
|
||||
{
|
||||
name: "empty string",
|
||||
v: "",
|
||||
err: client.ErrInvalidNodeVersion,
|
||||
err: ErrInvalidNodeVersion,
|
||||
},
|
||||
{
|
||||
name: "Prysm as the version string",
|
||||
v: "Prysm",
|
||||
err: client.ErrInvalidNodeVersion,
|
||||
err: ErrInvalidNodeVersion,
|
||||
},
|
||||
{
|
||||
name: "semver only",
|
||||
v: "v2.0.6",
|
||||
err: client.ErrInvalidNodeVersion,
|
||||
err: ErrInvalidNodeVersion,
|
||||
},
|
||||
{
|
||||
name: "implementation and semver only",
|
||||
v: "Prysm/v2.0.6",
|
||||
err: ErrInvalidNodeVersion,
|
||||
},
|
||||
{
|
||||
name: "complete version",
|
||||
@@ -36,34 +39,7 @@ func TestParseNodeVersion(t *testing.T) {
|
||||
nv: &NodeVersion{
|
||||
implementation: "Prysm",
|
||||
semver: "v2.0.6",
|
||||
systemInfo: "(linux amd64)",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nimbus version",
|
||||
v: "Nimbus/v22.4.0-039bec-stateofus",
|
||||
nv: &NodeVersion{
|
||||
implementation: "Nimbus",
|
||||
semver: "v22.4.0-039bec-stateofus",
|
||||
systemInfo: "",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "teku version",
|
||||
v: "teku/v22.3.2/linux-x86_64/oracle-java-11",
|
||||
nv: &NodeVersion{
|
||||
implementation: "teku",
|
||||
semver: "v22.3.2",
|
||||
systemInfo: "linux-x86_64/oracle-java-11",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "lighthouse version",
|
||||
v: "Lighthouse/v2.1.1-5f628a7/x86_64-linux",
|
||||
nv: &NodeVersion{
|
||||
implementation: "Lighthouse",
|
||||
semver: "v2.1.1-5f628a7",
|
||||
systemInfo: "x86_64-linux",
|
||||
systemInfo: "linux amd64",
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -80,60 +56,3 @@ func TestParseNodeVersion(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidHostname(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
hostArg string
|
||||
path string
|
||||
joined string
|
||||
err error
|
||||
}{
|
||||
{
|
||||
name: "hostname without port",
|
||||
hostArg: "mydomain.org",
|
||||
err: client.ErrMalformedHostname,
|
||||
},
|
||||
{
|
||||
name: "hostname with port",
|
||||
hostArg: "mydomain.org:3500",
|
||||
path: GetNodeVersionPath,
|
||||
joined: "http://mydomain.org:3500/eth/v1/node/version",
|
||||
},
|
||||
{
|
||||
name: "https scheme, hostname with port",
|
||||
hostArg: "https://mydomain.org:3500",
|
||||
path: GetNodeVersionPath,
|
||||
joined: "https://mydomain.org:3500/eth/v1/node/version",
|
||||
},
|
||||
{
|
||||
name: "http scheme, hostname without port",
|
||||
hostArg: "http://mydomain.org",
|
||||
path: GetNodeVersionPath,
|
||||
joined: "http://mydomain.org/eth/v1/node/version",
|
||||
},
|
||||
{
|
||||
name: "http scheme, trailing slash, hostname without port",
|
||||
hostArg: "http://mydomain.org/",
|
||||
path: GetNodeVersionPath,
|
||||
joined: "http://mydomain.org/eth/v1/node/version",
|
||||
},
|
||||
{
|
||||
name: "http scheme, hostname with basic auth creds and no port",
|
||||
hostArg: "http://username:pass@mydomain.org/",
|
||||
path: GetNodeVersionPath,
|
||||
joined: "http://username:pass@mydomain.org/eth/v1/node/version",
|
||||
},
|
||||
}
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
cl, err := NewClient(c.hostArg)
|
||||
if c.err != nil {
|
||||
require.ErrorIs(t, err, c.err)
|
||||
return
|
||||
}
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, c.joined, cl.BaseURL().ResolveReference(&url.URL{Path: c.path}).String())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
/*
Package beacon provides a client for interacting with the standard Eth Beacon Node API.
Interactive swagger documentation for the API is available here: https://ethereum.github.io/beacon-APIs/

*/
package beacon

api/client/beacon/errors.go (new file, 13 lines)
@@ -0,0 +1,13 @@
package beacon

import "github.com/pkg/errors"

// ErrNotOK is used to indicate when an HTTP request to the Beacon Node API failed with any non-2xx response code.
// More specific errors may be returned, but an error in reaction to a non-2xx response will always wrap ErrNotOK.
var ErrNotOK = errors.New("did not receive 2xx response from API")

// ErrNotFound specifically means that a '404 - NOT FOUND' response was received from the API.
var ErrNotFound = errors.Wrap(ErrNotOK, "recv 404 NotFound response from API")

// ErrInvalidNodeVersion indicates that the /eth/v1/node/version api response format was not recognized.
var ErrInvalidNodeVersion = errors.New("invalid node version response")
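A self-contained sketch of how callers can distinguish the sentinel errors defined above. It uses the standard library's %w wrapping in place of pkg/errors purely so the snippet runs on its own; the nesting mirrors ErrNotFound wrapping ErrNotOK, so the 404 case must be checked first.

	package main

	import (
		"errors"
		"fmt"
	)

	// classify is a hypothetical helper, not part of the diff.
	func classify(err, errNotFound, errNotOK error) string {
		switch {
		case err == nil:
			return "ok"
		case errors.Is(err, errNotFound):
			return "not found (404)"
		case errors.Is(err, errNotOK):
			return "non-2xx response"
		default:
			return "transport or decoding error"
		}
	}

	func main() {
		// Stand-ins for the package-level sentinels above.
		errNotOK := errors.New("did not receive 2xx response from API")
		errNotFound := fmt.Errorf("recv 404 NotFound response from API: %w", errNotOK)
		fmt.Println(classify(errNotFound, errNotFound, errNotOK)) // not found (404)
	}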
@@ -1,20 +0,0 @@
|
||||
load("@prysm//tools/go:def.bzl", "go_library", "go_test")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"health_mock.go",
|
||||
"interfaces.go",
|
||||
"tracker.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon/health",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = ["@org_uber_go_mock//gomock:go_default_library"],
|
||||
)
|
||||
|
||||
go_test(
|
||||
name = "go_default_test",
|
||||
srcs = ["tracker_test.go"],
|
||||
embed = [":go_default_library"],
|
||||
deps = ["@org_uber_go_mock//gomock:go_default_library"],
|
||||
)
|
||||
api/client/beacon/health/health_mock.go (generated, 65 lines)
@@ -1,65 +0,0 @@
|
||||
package health
|
||||
|
||||
import (
|
||||
"context"
|
||||
"reflect"
|
||||
"sync"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
// NewMockHealthClient creates a new mock instance.
|
||||
func NewMockHealthClient(ctrl *gomock.Controller) *MockHealthClient {
|
||||
mock := &MockHealthClient{ctrl: ctrl}
|
||||
mock.recorder = &MockHealthClientMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// MockHealthClient is a mock of HealthClient interface.
|
||||
type MockHealthClient struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockHealthClientMockRecorder
|
||||
Health bool
|
||||
sync.Mutex
|
||||
}
|
||||
|
||||
// MockHealthClientMockRecorder is the mock recorder for MockHealthClient.
|
||||
type MockHealthClientMockRecorder struct {
|
||||
mock *MockHealthClient
|
||||
}
|
||||
|
||||
// IsHealthy mocks base method.
|
||||
func (m *MockHealthClient) IsHealthy(arg0 context.Context) bool {
|
||||
m.Lock()
|
||||
defer m.Unlock()
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "IsHealthy", arg0)
|
||||
ret0, ok := ret[0].(bool)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
return ret0
|
||||
}
|
||||
|
||||
func (m *MockHealthClient) HealthUpdates() <-chan bool {
|
||||
ch := make(chan bool, 2)
|
||||
ch <- m.Health
|
||||
return ch
|
||||
}
|
||||
|
||||
func (m *MockHealthClient) CheckHealth(_ context.Context) bool {
|
||||
return m.Health
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockHealthClient) EXPECT() *MockHealthClientMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// IsHealthy indicates an expected call of IsHealthy.
|
||||
func (mr *MockHealthClientMockRecorder) IsHealthy(arg0 any) *gomock.Call {
|
||||
mr.mock.Lock()
|
||||
defer mr.mock.Unlock()
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsHealthy", reflect.TypeOf((*MockHealthClient)(nil).IsHealthy), arg0)
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
package health
|
||||
|
||||
import "context"
|
||||
|
||||
type HealthTracker interface {
|
||||
HealthUpdates() <-chan bool
|
||||
IsHealthy(ctx context.Context) bool
|
||||
CheckHealth(ctx context.Context) bool
|
||||
}
|
||||
|
||||
type HealthNode interface {
|
||||
IsHealthy(ctx context.Context) bool
|
||||
}
|
||||
@@ -1,58 +0,0 @@
|
||||
package health
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
)
|
||||
|
||||
type healthTracker struct {
|
||||
isHealthy *bool
|
||||
healthChan chan bool
|
||||
node HealthNode
|
||||
sync.RWMutex
|
||||
}
|
||||
|
||||
func NewTracker(node HealthNode) HealthTracker {
|
||||
return &healthTracker{
|
||||
node: node,
|
||||
healthChan: make(chan bool, 1),
|
||||
}
|
||||
}
|
||||
|
||||
// HealthUpdates provides a read-only channel for health updates.
|
||||
func (n *healthTracker) HealthUpdates() <-chan bool {
|
||||
return n.healthChan
|
||||
}
|
||||
|
||||
func (n *healthTracker) IsHealthy(_ context.Context) bool {
|
||||
n.RLock()
|
||||
defer n.RUnlock()
|
||||
if n.isHealthy == nil {
|
||||
return false
|
||||
}
|
||||
return *n.isHealthy
|
||||
}
|
||||
|
||||
func (n *healthTracker) CheckHealth(ctx context.Context) bool {
|
||||
n.Lock()
|
||||
defer n.Unlock()
|
||||
|
||||
newStatus := n.node.IsHealthy(ctx)
|
||||
if n.isHealthy == nil {
|
||||
n.isHealthy = &newStatus
|
||||
}
|
||||
|
||||
isStatusChanged := newStatus != *n.isHealthy
|
||||
if isStatusChanged {
|
||||
// Update the health status
|
||||
n.isHealthy = &newStatus
|
||||
// Send the new status to the health channel, potentially overwriting the existing value
|
||||
select {
|
||||
case <-n.healthChan:
|
||||
n.healthChan <- newStatus
|
||||
default:
|
||||
n.healthChan <- newStatus
|
||||
}
|
||||
}
|
||||
return newStatus
|
||||
}
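The CheckHealth method above relies on a one-slot channel that always holds the most recent status: it drains a stale, unread value before sending the new one. A self-contained sketch of just that pattern, with illustrative names:

	package main

	import "fmt"

	// publish mirrors the select used by CheckHealth: drop a stale buffered
	// value, if any, then send the latest one; in the tracker this runs under a lock.
	func publish(ch chan bool, v bool) {
		select {
		case <-ch: // discard an unread, stale update
		default:
		}
		ch <- v
	}

	func main() {
		ch := make(chan bool, 1)
		publish(ch, true)
		publish(ch, false) // overwrites the unread "true"
		fmt.Println(<-ch)  // false
	}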
|
||||
@@ -1,115 +0,0 @@
|
||||
package health
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
var (
|
||||
_ = HealthTracker(&MockHealthClient{})
|
||||
)
|
||||
|
||||
func TestNodeHealth_IsHealthy(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
isHealthy bool
|
||||
want bool
|
||||
}{
|
||||
{"initially healthy", true, true},
|
||||
{"initially unhealthy", false, false},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
n := &healthTracker{
|
||||
isHealthy: &tt.isHealthy,
|
||||
healthChan: make(chan bool, 1),
|
||||
}
|
||||
if got := n.IsHealthy(context.Background()); got != tt.want {
|
||||
t.Errorf("IsHealthy() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNodeHealth_UpdateNodeHealth(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
initial bool // Initial health status
|
||||
newStatus bool // Status to update to
|
||||
shouldSend bool // Should a message be sent through the channel
|
||||
}{
|
||||
{"healthy to unhealthy", true, false, true},
|
||||
{"unhealthy to healthy", false, true, true},
|
||||
{"remain healthy", true, true, false},
|
||||
{"remain unhealthy", false, false, false},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
client := NewMockHealthClient(ctrl)
|
||||
client.EXPECT().IsHealthy(gomock.Any()).Return(tt.newStatus)
|
||||
n := &healthTracker{
|
||||
isHealthy: &tt.initial,
|
||||
node: client,
|
||||
healthChan: make(chan bool, 1),
|
||||
}
|
||||
|
||||
s := n.CheckHealth(context.Background())
|
||||
// Check if health status was updated
|
||||
if s != tt.newStatus {
|
||||
t.Errorf("UpdateNodeHealth() failed to update isHealthy from %v to %v", tt.initial, tt.newStatus)
|
||||
}
|
||||
|
||||
select {
|
||||
case status := <-n.HealthUpdates():
|
||||
if !tt.shouldSend {
|
||||
t.Errorf("UpdateNodeHealth() unexpectedly sent status %v to HealthCh", status)
|
||||
} else if status != tt.newStatus {
|
||||
t.Errorf("UpdateNodeHealth() sent wrong status %v, want %v", status, tt.newStatus)
|
||||
}
|
||||
default:
|
||||
if tt.shouldSend {
|
||||
t.Error("UpdateNodeHealth() did not send any status to HealthCh when expected")
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNodeHealth_Concurrency(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
client := NewMockHealthClient(ctrl)
|
||||
n := NewTracker(client)
|
||||
var wg sync.WaitGroup
|
||||
|
||||
// Number of goroutines to spawn for both reading and writing
|
||||
numGoroutines := 6
|
||||
|
||||
wg.Add(numGoroutines * 2) // for readers and writers
|
||||
|
||||
// Concurrently update health status
|
||||
for i := 0; i < numGoroutines; i++ {
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
client.EXPECT().IsHealthy(gomock.Any()).Return(false).Times(1)
|
||||
n.CheckHealth(context.Background())
|
||||
client.EXPECT().IsHealthy(gomock.Any()).Return(true).Times(1)
|
||||
n.CheckHealth(context.Background())
|
||||
}()
|
||||
}
|
||||
|
||||
// Concurrently read health status
|
||||
for i := 0; i < numGoroutines; i++ {
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
_ = n.IsHealthy(context.Background()) // Just read the value
|
||||
}()
|
||||
}
|
||||
|
||||
wg.Wait() // Wait for all goroutines to finish
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
package beacon
|
||||
|
||||
import "github.com/sirupsen/logrus"
|
||||
|
||||
var log = logrus.WithField("prefix", "beacon")
|
||||
@@ -1,29 +0,0 @@
|
||||
load("@prysm//tools/go:def.bzl", "go_library")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"beacon_block_converter_mock.go",
|
||||
"chain_client_mock.go",
|
||||
"duties_mock.go",
|
||||
"genesis_mock.go",
|
||||
"json_rest_handler_mock.go",
|
||||
"node_client_mock.go",
|
||||
"prysm_chain_client_mock.go",
|
||||
"state_validators_mock.go",
|
||||
"validator_client_mock.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//api/client/beacon:go_default_library",
|
||||
"//api/client/beacon/health:go_default_library",
|
||||
"//api/client/event:go_default_library",
|
||||
"//api/server/structs:go_default_library",
|
||||
"//consensus-types/primitives:go_default_library",
|
||||
"//consensus-types/validator:go_default_library",
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"@org_golang_google_protobuf//types/known/emptypb:go_default_library",
|
||||
"@org_uber_go_mock//gomock:go_default_library",
|
||||
],
|
||||
)
|
||||
api/client/beacon/mock/beacon_block_converter_mock.go (generated, 101 lines)
@@ -1,101 +0,0 @@
|
||||
// Code generated by MockGen. DO NOT EDIT.
|
||||
// Source: validator/client/beacon-api/beacon_block_converter.go
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -package=mock -source=validator/client/beacon-api/beacon_block_converter.go -destination=validator/client/beacon-api/mock/beacon_block_converter_mock.go
|
||||
//
|
||||
|
||||
// Package mock is a generated GoMock package.
|
||||
package mock
|
||||
|
||||
import (
|
||||
reflect "reflect"
|
||||
|
||||
structs "github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
eth "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
gomock "go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
// MockBeaconBlockConverter is a mock of BeaconBlockConverter interface.
|
||||
type MockBeaconBlockConverter struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockBeaconBlockConverterMockRecorder
|
||||
}
|
||||
|
||||
// MockBeaconBlockConverterMockRecorder is the mock recorder for MockBeaconBlockConverter.
|
||||
type MockBeaconBlockConverterMockRecorder struct {
|
||||
mock *MockBeaconBlockConverter
|
||||
}
|
||||
|
||||
// NewMockBeaconBlockConverter creates a new mock instance.
|
||||
func NewMockBeaconBlockConverter(ctrl *gomock.Controller) *MockBeaconBlockConverter {
|
||||
mock := &MockBeaconBlockConverter{ctrl: ctrl}
|
||||
mock.recorder = &MockBeaconBlockConverterMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockBeaconBlockConverter) EXPECT() *MockBeaconBlockConverterMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// ConvertRESTAltairBlockToProto mocks base method.
|
||||
func (m *MockBeaconBlockConverter) ConvertRESTAltairBlockToProto(block *structs.BeaconBlockAltair) (*eth.BeaconBlockAltair, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ConvertRESTAltairBlockToProto", block)
|
||||
ret0, _ := ret[0].(*eth.BeaconBlockAltair)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ConvertRESTAltairBlockToProto indicates an expected call of ConvertRESTAltairBlockToProto.
|
||||
func (mr *MockBeaconBlockConverterMockRecorder) ConvertRESTAltairBlockToProto(block any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConvertRESTAltairBlockToProto", reflect.TypeOf((*MockBeaconBlockConverter)(nil).ConvertRESTAltairBlockToProto), block)
|
||||
}
|
||||
|
||||
// ConvertRESTBellatrixBlockToProto mocks base method.
|
||||
func (m *MockBeaconBlockConverter) ConvertRESTBellatrixBlockToProto(block *structs.BeaconBlockBellatrix) (*eth.BeaconBlockBellatrix, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ConvertRESTBellatrixBlockToProto", block)
|
||||
ret0, _ := ret[0].(*eth.BeaconBlockBellatrix)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ConvertRESTBellatrixBlockToProto indicates an expected call of ConvertRESTBellatrixBlockToProto.
|
||||
func (mr *MockBeaconBlockConverterMockRecorder) ConvertRESTBellatrixBlockToProto(block any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConvertRESTBellatrixBlockToProto", reflect.TypeOf((*MockBeaconBlockConverter)(nil).ConvertRESTBellatrixBlockToProto), block)
|
||||
}
|
||||
|
||||
// ConvertRESTCapellaBlockToProto mocks base method.
|
||||
func (m *MockBeaconBlockConverter) ConvertRESTCapellaBlockToProto(block *structs.BeaconBlockCapella) (*eth.BeaconBlockCapella, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ConvertRESTCapellaBlockToProto", block)
|
||||
ret0, _ := ret[0].(*eth.BeaconBlockCapella)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ConvertRESTCapellaBlockToProto indicates an expected call of ConvertRESTCapellaBlockToProto.
|
||||
func (mr *MockBeaconBlockConverterMockRecorder) ConvertRESTCapellaBlockToProto(block any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConvertRESTCapellaBlockToProto", reflect.TypeOf((*MockBeaconBlockConverter)(nil).ConvertRESTCapellaBlockToProto), block)
|
||||
}
|
||||
|
||||
// ConvertRESTPhase0BlockToProto mocks base method.
|
||||
func (m *MockBeaconBlockConverter) ConvertRESTPhase0BlockToProto(block *structs.BeaconBlock) (*eth.BeaconBlock, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ConvertRESTPhase0BlockToProto", block)
|
||||
ret0, _ := ret[0].(*eth.BeaconBlock)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ConvertRESTPhase0BlockToProto indicates an expected call of ConvertRESTPhase0BlockToProto.
|
||||
func (mr *MockBeaconBlockConverterMockRecorder) ConvertRESTPhase0BlockToProto(block any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConvertRESTPhase0BlockToProto", reflect.TypeOf((*MockBeaconBlockConverter)(nil).ConvertRESTPhase0BlockToProto), block)
|
||||
}
|
||||
api/client/beacon/mock/chain_client_mock.go (generated, 132 lines)
@@ -1,132 +0,0 @@
|
||||
// Code generated by MockGen. DO NOT EDIT.
|
||||
// Source: github.com/prysmaticlabs/prysm/v5/validator/client/iface (interfaces: ChainClient)
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -package=validator_mock -destination=testing/validator-mock/chain_client_mock.go github.com/prysmaticlabs/prysm/v5/validator/client/iface ChainClient
|
||||
//
|
||||
|
||||
// Package validator_mock is a generated GoMock package.
|
||||
package mock
|
||||
|
||||
import (
|
||||
context "context"
|
||||
reflect "reflect"
|
||||
|
||||
eth "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
gomock "go.uber.org/mock/gomock"
|
||||
emptypb "google.golang.org/protobuf/types/known/emptypb"
|
||||
)
|
||||
|
||||
// MockChainClient is a mock of ChainClient interface.
|
||||
type MockChainClient struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockChainClientMockRecorder
|
||||
}
|
||||
|
||||
// MockChainClientMockRecorder is the mock recorder for MockChainClient.
|
||||
type MockChainClientMockRecorder struct {
|
||||
mock *MockChainClient
|
||||
}
|
||||
|
||||
// NewMockChainClient creates a new mock instance.
|
||||
func NewMockChainClient(ctrl *gomock.Controller) *MockChainClient {
|
||||
mock := &MockChainClient{ctrl: ctrl}
|
||||
mock.recorder = &MockChainClientMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockChainClient) EXPECT() *MockChainClientMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// ChainHead mocks base method.
|
||||
func (m *MockChainClient) ChainHead(arg0 context.Context, arg1 *emptypb.Empty) (*eth.ChainHead, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ChainHead", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ChainHead)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ChainHead indicates an expected call of ChainHead.
|
||||
func (mr *MockChainClientMockRecorder) ChainHead(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ChainHead", reflect.TypeOf((*MockChainClient)(nil).ChainHead), arg0, arg1)
|
||||
}
|
||||
|
||||
// ValidatorBalances mocks base method.
|
||||
func (m *MockChainClient) ValidatorBalances(arg0 context.Context, arg1 *eth.ListValidatorBalancesRequest) (*eth.ValidatorBalances, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ValidatorBalances", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ValidatorBalances)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ValidatorBalances indicates an expected call of ValidatorBalances.
|
||||
func (mr *MockChainClientMockRecorder) ValidatorBalances(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidatorBalances", reflect.TypeOf((*MockChainClient)(nil).ValidatorBalances), arg0, arg1)
|
||||
}
|
||||
|
||||
// ValidatorParticipation mocks base method.
|
||||
func (m *MockChainClient) ValidatorParticipation(arg0 context.Context, arg1 *eth.GetValidatorParticipationRequest) (*eth.ValidatorParticipationResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ValidatorParticipation", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ValidatorParticipationResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ValidatorParticipation indicates an expected call of ValidatorParticipation.
|
||||
func (mr *MockChainClientMockRecorder) ValidatorParticipation(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidatorParticipation", reflect.TypeOf((*MockChainClient)(nil).ValidatorParticipation), arg0, arg1)
|
||||
}
|
||||
|
||||
// ValidatorPerformance mocks base method.
|
||||
func (m *MockChainClient) ValidatorPerformance(arg0 context.Context, arg1 *eth.ValidatorPerformanceRequest) (*eth.ValidatorPerformanceResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ValidatorPerformance", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ValidatorPerformanceResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ValidatorPerformance indicates an expected call of ValidatorPerformance.
|
||||
func (mr *MockChainClientMockRecorder) ValidatorPerformance(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidatorPerformance", reflect.TypeOf((*MockChainClient)(nil).ValidatorPerformance), arg0, arg1)
|
||||
}
|
||||
|
||||
// ValidatorQueue mocks base method.
|
||||
func (m *MockChainClient) ValidatorQueue(arg0 context.Context, arg1 *emptypb.Empty) (*eth.ValidatorQueue, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ValidatorQueue", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ValidatorQueue)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ValidatorQueue indicates an expected call of ValidatorQueue.
|
||||
func (mr *MockChainClientMockRecorder) ValidatorQueue(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidatorQueue", reflect.TypeOf((*MockChainClient)(nil).ValidatorQueue), arg0, arg1)
|
||||
}
|
||||
|
||||
// Validators mocks base method.
|
||||
func (m *MockChainClient) Validators(arg0 context.Context, arg1 *eth.ListValidatorsRequest) (*eth.Validators, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Validators", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.Validators)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// Validators indicates an expected call of Validators.
|
||||
func (mr *MockChainClientMockRecorder) Validators(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Validators", reflect.TypeOf((*MockChainClient)(nil).Validators), arg0, arg1)
|
||||
}
|
||||
api/client/beacon/mock/duties_mock.go (generated, 102 lines)
@@ -1,102 +0,0 @@
|
||||
// Code generated by MockGen. DO NOT EDIT.
|
||||
// Source: validator/client/beacon-api/duties.go
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -package=mock -source=validator/client/beacon-api/duties.go -destination=validator/client/beacon-api/mock/duties_mock.go
|
||||
//
|
||||
|
||||
// Package mock is a generated GoMock package.
|
||||
package mock
|
||||
|
||||
import (
|
||||
context "context"
|
||||
reflect "reflect"
|
||||
|
||||
structs "github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
primitives "github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
gomock "go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
// MockdutiesProvider is a mock of dutiesProvider interface.
|
||||
type MockdutiesProvider struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockdutiesProviderMockRecorder
|
||||
}
|
||||
|
||||
// MockdutiesProviderMockRecorder is the mock recorder for MockdutiesProvider.
|
||||
type MockdutiesProviderMockRecorder struct {
|
||||
mock *MockdutiesProvider
|
||||
}
|
||||
|
||||
// NewMockdutiesProvider creates a new mock instance.
|
||||
func NewMockdutiesProvider(ctrl *gomock.Controller) *MockdutiesProvider {
|
||||
mock := &MockdutiesProvider{ctrl: ctrl}
|
||||
mock.recorder = &MockdutiesProviderMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockdutiesProvider) EXPECT() *MockdutiesProviderMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// AttesterDuties mocks base method.
|
||||
func (m *MockdutiesProvider) AttesterDuties(ctx context.Context, epoch primitives.Epoch, validatorIndices []primitives.ValidatorIndex) ([]*structs.AttesterDuty, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "AttesterDuties", ctx, epoch, validatorIndices)
|
||||
ret0, _ := ret[0].([]*structs.AttesterDuty)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// AttesterDuties indicates an expected call of AttesterDuties.
|
||||
func (mr *MockdutiesProviderMockRecorder) AttesterDuties(ctx, epoch, validatorIndices any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AttesterDuties", reflect.TypeOf((*MockdutiesProvider)(nil).AttesterDuties), ctx, epoch, validatorIndices)
|
||||
}
|
||||
|
||||
// Committees mocks base method.
|
||||
func (m *MockdutiesProvider) Committees(ctx context.Context, epoch primitives.Epoch) ([]*structs.Committee, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Committees", ctx, epoch)
|
||||
ret0, _ := ret[0].([]*structs.Committee)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// Committees indicates an expected call of Committees.
|
||||
func (mr *MockdutiesProviderMockRecorder) Committees(ctx, epoch any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Committees", reflect.TypeOf((*MockdutiesProvider)(nil).Committees), ctx, epoch)
|
||||
}
|
||||
|
||||
// ProposerDuties mocks base method.
|
||||
func (m *MockdutiesProvider) ProposerDuties(ctx context.Context, epoch primitives.Epoch) ([]*structs.ProposerDuty, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ProposerDuties", ctx, epoch)
|
||||
ret0, _ := ret[0].([]*structs.ProposerDuty)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ProposerDuties indicates an expected call of ProposerDuties.
|
||||
func (mr *MockdutiesProviderMockRecorder) ProposerDuties(ctx, epoch any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ProposerDuties", reflect.TypeOf((*MockdutiesProvider)(nil).ProposerDuties), ctx, epoch)
|
||||
}
|
||||
|
||||
// SyncDuties mocks base method.
|
||||
func (m *MockdutiesProvider) SyncDuties(ctx context.Context, epoch primitives.Epoch, validatorIndices []primitives.ValidatorIndex) ([]*structs.SyncCommitteeDuty, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SyncDuties", ctx, epoch, validatorIndices)
|
||||
ret0, _ := ret[0].([]*structs.SyncCommitteeDuty)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SyncDuties indicates an expected call of SyncDuties.
|
||||
func (mr *MockdutiesProviderMockRecorder) SyncDuties(ctx, epoch, validatorIndices any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SyncDuties", reflect.TypeOf((*MockdutiesProvider)(nil).SyncDuties), ctx, epoch, validatorIndices)
|
||||
}
|
||||
api/client/beacon/mock/genesis_mock.go (generated, 56 lines)
@@ -1,56 +0,0 @@
|
||||
// Code generated by MockGen. DO NOT EDIT.
|
||||
// Source: validator/client/beacon-api/genesis.go
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -package=mock -source=validator/client/beacon-api/genesis.go -destination=validator/client/beacon-api/mock/genesis_mock.go
|
||||
//
|
||||
|
||||
// Package mock is a generated GoMock package.
|
||||
package mock
|
||||
|
||||
import (
|
||||
context "context"
|
||||
reflect "reflect"
|
||||
|
||||
structs "github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
gomock "go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
// MockGenesisProvider is a mock of GenesisProvider interface.
|
||||
type MockGenesisProvider struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockGenesisProviderMockRecorder
|
||||
}
|
||||
|
||||
// MockGenesisProviderMockRecorder is the mock recorder for MockGenesisProvider.
|
||||
type MockGenesisProviderMockRecorder struct {
|
||||
mock *MockGenesisProvider
|
||||
}
|
||||
|
||||
// NewMockGenesisProvider creates a new mock instance.
|
||||
func NewMockGenesisProvider(ctrl *gomock.Controller) *MockGenesisProvider {
|
||||
mock := &MockGenesisProvider{ctrl: ctrl}
|
||||
mock.recorder = &MockGenesisProviderMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockGenesisProvider) EXPECT() *MockGenesisProviderMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// Genesis mocks base method.
|
||||
func (m *MockGenesisProvider) Genesis(ctx context.Context) (*structs.Genesis, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Genesis", ctx)
|
||||
ret0, _ := ret[0].(*structs.Genesis)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// Genesis indicates an expected call of Genesis.
|
||||
func (mr *MockGenesisProviderMockRecorder) Genesis(ctx any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Genesis", reflect.TypeOf((*MockGenesisProvider)(nil).Genesis), ctx)
|
||||
}
|
||||
api/client/beacon/mock/json_rest_handler_mock.go (generated, 110 lines)
@@ -1,110 +0,0 @@
|
||||
// Code generated by MockGen. DO NOT EDIT.
|
||||
// Source: validator/client/beacon-api/json_rest_handler.go
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -package=mock -source=validator/client/beacon-api/json_rest_handler.go -destination=validator/client/beacon-api/mock/json_rest_handler_mock.go
|
||||
//
|
||||
|
||||
// Package mock is a generated GoMock package.
|
||||
package mock
|
||||
|
||||
import (
|
||||
bytes "bytes"
|
||||
context "context"
|
||||
http "net/http"
|
||||
reflect "reflect"
|
||||
|
||||
gomock "go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
// MockJsonRestHandler is a mock of JsonRestHandler interface.
|
||||
type MockJsonRestHandler struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockJsonRestHandlerMockRecorder
|
||||
}
|
||||
|
||||
// MockJsonRestHandlerMockRecorder is the mock recorder for MockJsonRestHandler.
|
||||
type MockJsonRestHandlerMockRecorder struct {
|
||||
mock *MockJsonRestHandler
|
||||
}
|
||||
|
||||
// NewMockJsonRestHandler creates a new mock instance.
|
||||
func NewMockJsonRestHandler(ctrl *gomock.Controller) *MockJsonRestHandler {
|
||||
mock := &MockJsonRestHandler{ctrl: ctrl}
|
||||
mock.recorder = &MockJsonRestHandlerMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockJsonRestHandler) EXPECT() *MockJsonRestHandlerMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// Get mocks base method.
|
||||
func (m *MockJsonRestHandler) Get(ctx context.Context, endpoint string, resp any) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Get", ctx, endpoint, resp)
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// Get indicates an expected call of Get.
|
||||
func (mr *MockJsonRestHandlerMockRecorder) Get(ctx, endpoint, resp any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockJsonRestHandler)(nil).Get), ctx, endpoint, resp)
|
||||
}
|
||||
|
||||
// Host mocks base method.
|
||||
func (m *MockJsonRestHandler) Host() string {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Host")
|
||||
ret0, _ := ret[0].(string)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// Host indicates an expected call of Host.
|
||||
func (mr *MockJsonRestHandlerMockRecorder) Host() *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Host", reflect.TypeOf((*MockJsonRestHandler)(nil).Host))
|
||||
}
|
||||
|
||||
// HttpClient mocks base method.
|
||||
func (m *MockJsonRestHandler) HttpClient() *http.Client {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "HttpClient")
|
||||
ret0, _ := ret[0].(*http.Client)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// HttpClient indicates an expected call of HttpClient.
|
||||
func (mr *MockJsonRestHandlerMockRecorder) HttpClient() *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "HttpClient", reflect.TypeOf((*MockJsonRestHandler)(nil).HttpClient))
|
||||
}
|
||||
|
||||
// Post mocks base method.
|
||||
func (m *MockJsonRestHandler) Post(ctx context.Context, endpoint string, headers map[string]string, data *bytes.Buffer, resp any) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Post", ctx, endpoint, headers, data, resp)
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// Post indicates an expected call of Post.
|
||||
func (mr *MockJsonRestHandlerMockRecorder) Post(ctx, endpoint, headers, data, resp any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Post", reflect.TypeOf((*MockJsonRestHandler)(nil).Post), ctx, endpoint, headers, data, resp)
|
||||
}
|
||||
|
||||
// SetHost mocks base method.
|
||||
func (m *MockJsonRestHandler) SetHost(host string) {
|
||||
m.ctrl.T.Helper()
|
||||
m.ctrl.Call(m, "SetHost", host)
|
||||
}
|
||||
|
||||
// SetHost indicates an expected call of SetHost.
|
||||
func (mr *MockJsonRestHandlerMockRecorder) SetHost(host any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetHost", reflect.TypeOf((*MockJsonRestHandler)(nil).SetHost), host)
|
||||
}
|
||||
api/client/beacon/mock/node_client_mock.go (generated, 115 lines)
@@ -1,115 +0,0 @@
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -package=validator_mock -destination=testing/validator-mock/node_client_mock.go github.com/prysmaticlabs/prysm/v5/validator/client/iface NodeClient
|
||||
//
|
||||
|
||||
// Package validator_mock is a generated GoMock package.
|
||||
package mock
|
||||
|
||||
import (
|
||||
context "context"
|
||||
reflect "reflect"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/health"
|
||||
eth "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
gomock "go.uber.org/mock/gomock"
|
||||
emptypb "google.golang.org/protobuf/types/known/emptypb"
|
||||
)
|
||||
|
||||
// MockNodeClient is a mock of NodeClient interface.
|
||||
type MockNodeClient struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockNodeClientMockRecorder
|
||||
}
|
||||
|
||||
// MockNodeClientMockRecorder is the mock recorder for MockNodeClient.
|
||||
type MockNodeClientMockRecorder struct {
|
||||
mock *MockNodeClient
|
||||
}
|
||||
|
||||
// NewMockNodeClient creates a new mock instance.
|
||||
func NewMockNodeClient(ctrl *gomock.Controller) *MockNodeClient {
|
||||
mock := &MockNodeClient{ctrl: ctrl}
|
||||
mock.recorder = &MockNodeClientMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockNodeClient) EXPECT() *MockNodeClientMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// Genesis mocks base method.
|
||||
func (m *MockNodeClient) Genesis(arg0 context.Context, arg1 *emptypb.Empty) (*eth.Genesis, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Genesis", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.Genesis)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// Genesis indicates an expected call of Genesis.
|
||||
func (mr *MockNodeClientMockRecorder) Genesis(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Genesis", reflect.TypeOf((*MockNodeClient)(nil).Genesis), arg0, arg1)
|
||||
}
|
||||
|
||||
// HealthTracker mocks base method.
|
||||
func (m *MockNodeClient) HealthTracker() health.HealthTracker {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "HealthTracker")
|
||||
ret0, _ := ret[0].(health.HealthTracker)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// HealthTracker indicates an expected call of HealthTracker.
|
||||
func (mr *MockNodeClientMockRecorder) HealthTracker() *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "HealthTracker", reflect.TypeOf((*MockNodeClient)(nil).HealthTracker))
|
||||
}
|
||||
|
||||
// Peers mocks base method.
|
||||
func (m *MockNodeClient) Peers(arg0 context.Context, arg1 *emptypb.Empty) (*eth.Peers, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Peers", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.Peers)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// Peers indicates an expected call of Peers.
|
||||
func (mr *MockNodeClientMockRecorder) Peers(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Peers", reflect.TypeOf((*MockNodeClient)(nil).Peers), arg0, arg1)
|
||||
}
|
||||
|
||||
// SyncStatus mocks base method.
|
||||
func (m *MockNodeClient) SyncStatus(arg0 context.Context, arg1 *emptypb.Empty) (*eth.SyncStatus, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SyncStatus", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.SyncStatus)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SyncStatus indicates an expected call of SyncStatus.
|
||||
func (mr *MockNodeClientMockRecorder) SyncStatus(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SyncStatus", reflect.TypeOf((*MockNodeClient)(nil).SyncStatus), arg0, arg1)
|
||||
}
|
||||
|
||||
// Version mocks base method.
|
||||
func (m *MockNodeClient) Version(arg0 context.Context, arg1 *emptypb.Empty) (*eth.Version, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Version", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.Version)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// Version indicates an expected call of Version.
|
||||
func (mr *MockNodeClientMockRecorder) Version(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Version", reflect.TypeOf((*MockNodeClient)(nil).Version), arg0, arg1)
|
||||
}
|
||||
api/client/beacon/mock/prysm_chain_client_mock.go (generated, 73 lines)
@@ -1,73 +0,0 @@
|
||||
// Code generated by MockGen. DO NOT EDIT.
|
||||
// Source: github.com/prysmaticlabs/prysm/v5/validator/client/iface (interfaces: PrysmChainClient)
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -package=validator_mock -destination=testing/validator-mock/prysm_chain_client_mock.go github.com/prysmaticlabs/prysm/v5/validator/client/iface PrysmChainClient
|
||||
//
|
||||
|
||||
// Package validator_mock is a generated GoMock package.
|
||||
package mock
|
||||
|
||||
import (
|
||||
context "context"
|
||||
reflect "reflect"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon"
|
||||
validator "github.com/prysmaticlabs/prysm/v5/consensus-types/validator"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
gomock "go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
// MockPrysmChainClient is a mock of PrysmChainClient interface.
|
||||
type MockPrysmChainClient struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockPrysmChainClientMockRecorder
|
||||
}
|
||||
|
||||
// MockPrysmChainClientMockRecorder is the mock recorder for MockPrysmChainClient.
|
||||
type MockPrysmChainClientMockRecorder struct {
|
||||
mock *MockPrysmChainClient
|
||||
}
|
||||
|
||||
// NewMockPrysmChainClient creates a new mock instance.
|
||||
func NewMockPrysmChainClient(ctrl *gomock.Controller) *MockPrysmChainClient {
|
||||
mock := &MockPrysmChainClient{ctrl: ctrl}
|
||||
mock.recorder = &MockPrysmChainClientMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockPrysmChainClient) EXPECT() *MockPrysmChainClientMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// ValidatorCount mocks base method.
|
||||
func (m *MockPrysmChainClient) ValidatorCount(arg0 context.Context, arg1 string, arg2 []validator.Status) ([]beacon.ValidatorCount, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ValidatorCount", arg0, arg1, arg2)
|
||||
ret0, _ := ret[0].([]beacon.ValidatorCount)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ValidatorCount indicates an expected call of ValidatorCount.
|
||||
func (mr *MockPrysmChainClientMockRecorder) ValidatorCount(arg0, arg1, arg2 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidatorCount", reflect.TypeOf((*MockPrysmChainClient)(nil).ValidatorCount), arg0, arg1, arg2)
|
||||
}
|
||||
|
||||
// ValidatorPerformance mocks base method.
|
||||
func (m *MockPrysmChainClient) ValidatorPerformance(arg0 context.Context, arg1 *ethpb.ValidatorPerformanceRequest) (*ethpb.ValidatorPerformanceResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ValidatorPerformance", arg0, arg1)
|
||||
ret0, _ := ret[0].(*ethpb.ValidatorPerformanceResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ValidatorPerformance indicates an expected call of ValidatorPerformance.
|
||||
func (mr *MockPrysmChainClientMockRecorder) ValidatorPerformance(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidatorCount", reflect.TypeOf((*MockPrysmChainClient)(nil).ValidatorPerformance), arg0, arg1)
|
||||
}
|
||||
api/client/beacon/mock/state_validators_mock.go (generated, 87 lines)
@@ -1,87 +0,0 @@
// Code generated by MockGen. DO NOT EDIT.
// Source: validator/client/beacon-api/state_validators.go
//
// Generated by this command:
//
//	mockgen -package=mock -source=validator/client/beacon-api/state_validators.go -destination=validator/client/beacon-api/mock/state_validators_mock.go
//

// Package mock is a generated GoMock package.
package mock

import (
	context "context"
	reflect "reflect"

	structs "github.com/prysmaticlabs/prysm/v5/api/server/structs"
	primitives "github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
	gomock "go.uber.org/mock/gomock"
)

// MockStateValidatorsProvider is a mock of StateValidatorsProvider interface.
type MockStateValidatorsProvider struct {
	ctrl     *gomock.Controller
	recorder *MockStateValidatorsProviderMockRecorder
}

// MockStateValidatorsProviderMockRecorder is the mock recorder for MockStateValidatorsProvider.
type MockStateValidatorsProviderMockRecorder struct {
	mock *MockStateValidatorsProvider
}

// NewMockStateValidatorsProvider creates a new mock instance.
func NewMockStateValidatorsProvider(ctrl *gomock.Controller) *MockStateValidatorsProvider {
	mock := &MockStateValidatorsProvider{ctrl: ctrl}
	mock.recorder = &MockStateValidatorsProviderMockRecorder{mock}
	return mock
}

// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockStateValidatorsProvider) EXPECT() *MockStateValidatorsProviderMockRecorder {
	return m.recorder
}

// StateValidators mocks base method.
func (m *MockStateValidatorsProvider) StateValidators(arg0 context.Context, arg1 []string, arg2 []primitives.ValidatorIndex, arg3 []string) (*structs.GetValidatorsResponse, error) {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "StateValidators", arg0, arg1, arg2, arg3)
	ret0, _ := ret[0].(*structs.GetValidatorsResponse)
	ret1, _ := ret[1].(error)
	return ret0, ret1
}

// StateValidators indicates an expected call of StateValidators.
func (mr *MockStateValidatorsProviderMockRecorder) StateValidators(arg0, arg1, arg2, arg3 any) *gomock.Call {
	mr.mock.ctrl.T.Helper()
	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "StateValidators", reflect.TypeOf((*MockStateValidatorsProvider)(nil).StateValidators), arg0, arg1, arg2, arg3)
}

// StateValidatorsForHead mocks base method.
func (m *MockStateValidatorsProvider) StateValidatorsForHead(arg0 context.Context, arg1 []string, arg2 []primitives.ValidatorIndex, arg3 []string) (*structs.GetValidatorsResponse, error) {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "StateValidatorsForHead", arg0, arg1, arg2, arg3)
	ret0, _ := ret[0].(*structs.GetValidatorsResponse)
	ret1, _ := ret[1].(error)
	return ret0, ret1
}

// StateValidatorsForHead indicates an expected call of StateValidatorsForHead.
func (mr *MockStateValidatorsProviderMockRecorder) StateValidatorsForHead(arg0, arg1, arg2, arg3 any) *gomock.Call {
	mr.mock.ctrl.T.Helper()
	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "StateValidatorsForHead", reflect.TypeOf((*MockStateValidatorsProvider)(nil).StateValidatorsForHead), arg0, arg1, arg2, arg3)
}

// StateValidatorsForSlot mocks base method.
func (m *MockStateValidatorsProvider) StateValidatorsForSlot(arg0 context.Context, arg1 primitives.Slot, arg2 []string, arg3 []primitives.ValidatorIndex, arg4 []string) (*structs.GetValidatorsResponse, error) {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "StateValidatorsForSlot", arg0, arg1, arg2, arg3, arg4)
	ret0, _ := ret[0].(*structs.GetValidatorsResponse)
	ret1, _ := ret[1].(error)
	return ret0, ret1
}

// StateValidatorsForSlot indicates an expected call of StateValidatorsForSlot.
func (mr *MockStateValidatorsProviderMockRecorder) StateValidatorsForSlot(arg0, arg1, arg2, arg3, arg4 any) *gomock.Call {
	mr.mock.ctrl.T.Helper()
	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "StateValidatorsForSlot", reflect.TypeOf((*MockStateValidatorsProvider)(nil).StateValidatorsForSlot), arg0, arg1, arg2, arg3, arg4)
}
api/client/beacon/mock/validator_client_mock.go (generated, 517 lines)
@@ -1,517 +0,0 @@
|
||||
// Code generated by MockGen. DO NOT EDIT.
|
||||
// Source: github.com/prysmaticlabs/prysm/v5/validator/client/iface (interfaces: ValidatorClient)
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -package=validator_mock -destination=testing/validator-mock/validator_client_mock.go github.com/prysmaticlabs/prysm/v5/validator/client/iface ValidatorClient
|
||||
//
|
||||
|
||||
// Package validator_mock is a generated GoMock package.
|
||||
package mock
|
||||
|
||||
import (
|
||||
context "context"
|
||||
reflect "reflect"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon"
|
||||
event "github.com/prysmaticlabs/prysm/v5/api/client/event"
|
||||
primitives "github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
eth "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
gomock "go.uber.org/mock/gomock"
|
||||
emptypb "google.golang.org/protobuf/types/known/emptypb"
|
||||
)
|
||||
|
||||
// MockValidatorClient is a mock of ValidatorClient interface.
|
||||
type MockValidatorClient struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockValidatorClientMockRecorder
|
||||
}
|
||||
|
||||
// MockValidatorClientMockRecorder is the mock recorder for MockValidatorClient.
|
||||
type MockValidatorClientMockRecorder struct {
|
||||
mock *MockValidatorClient
|
||||
}
|
||||
|
||||
// NewMockValidatorClient creates a new mock instance.
|
||||
func NewMockValidatorClient(ctrl *gomock.Controller) *MockValidatorClient {
|
||||
mock := &MockValidatorClient{ctrl: ctrl}
|
||||
mock.recorder = &MockValidatorClientMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockValidatorClient) EXPECT() *MockValidatorClientMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// AggregatedSelections mocks base method.
|
||||
func (m *MockValidatorClient) AggregatedSelections(arg0 context.Context, arg1 []beacon.BeaconCommitteeSelection) ([]beacon.BeaconCommitteeSelection, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "AggregatedSelections", arg0, arg1)
|
||||
ret0, _ := ret[0].([]beacon.BeaconCommitteeSelection)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// AggregatedSelections indicates an expected call of AggregatedSelections.
|
||||
func (mr *MockValidatorClientMockRecorder) AggregatedSelections(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AggregatedSelections", reflect.TypeOf((*MockValidatorClient)(nil).AggregatedSelections), arg0, arg1)
|
||||
}
|
||||
|
||||
// AggregatedSyncSelections mocks base method.
|
||||
func (m *MockValidatorClient) AggregatedSyncSelections(arg0 context.Context, arg1 []beacon.SyncCommitteeSelection) ([]beacon.SyncCommitteeSelection, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "AggregatedSyncSelections", arg0, arg1)
|
||||
ret0, _ := ret[0].([]beacon.SyncCommitteeSelection)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// AggregatedSyncSelections indicates an expected call of AggregatedSyncSelections.
|
||||
func (mr *MockValidatorClientMockRecorder) AggregatedSyncSelections(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AggregatedSyncSelections", reflect.TypeOf((*MockValidatorClient)(nil).AggregatedSyncSelections), arg0, arg1)
|
||||
}
|
||||
|
||||
// AttestationData mocks base method.
|
||||
func (m *MockValidatorClient) AttestationData(arg0 context.Context, arg1 *eth.AttestationDataRequest) (*eth.AttestationData, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "AttestationData", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.AttestationData)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// AttestationData indicates an expected call of AttestationData.
|
||||
func (mr *MockValidatorClientMockRecorder) AttestationData(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AttestationData", reflect.TypeOf((*MockValidatorClient)(nil).AttestationData), arg0, arg1)
|
||||
}
|
||||
|
||||
// BeaconBlock mocks base method.
|
||||
func (m *MockValidatorClient) BeaconBlock(arg0 context.Context, arg1 *eth.BlockRequest) (*eth.GenericBeaconBlock, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "BeaconBlock", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.GenericBeaconBlock)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// BeaconBlock indicates an expected call of BeaconBlock.
|
||||
func (mr *MockValidatorClientMockRecorder) BeaconBlock(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BeaconBlock", reflect.TypeOf((*MockValidatorClient)(nil).BeaconBlock), arg0, arg1)
|
||||
}
|
||||
|
||||
// CheckDoppelGanger mocks base method.
|
||||
func (m *MockValidatorClient) CheckDoppelGanger(arg0 context.Context, arg1 *eth.DoppelGangerRequest) (*eth.DoppelGangerResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "CheckDoppelGanger", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.DoppelGangerResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// CheckDoppelGanger indicates an expected call of CheckDoppelGanger.
|
||||
func (mr *MockValidatorClientMockRecorder) CheckDoppelGanger(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CheckDoppelGanger", reflect.TypeOf((*MockValidatorClient)(nil).CheckDoppelGanger), arg0, arg1)
|
||||
}
|
||||
|
||||
// DomainData mocks base method.
|
||||
func (m *MockValidatorClient) DomainData(arg0 context.Context, arg1 *eth.DomainRequest) (*eth.DomainResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "DomainData", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.DomainResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// DomainData indicates an expected call of DomainData.
|
||||
func (mr *MockValidatorClientMockRecorder) DomainData(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DomainData", reflect.TypeOf((*MockValidatorClient)(nil).DomainData), arg0, arg1)
|
||||
}
|
||||
|
||||
// Duties mocks base method.
|
||||
func (m *MockValidatorClient) Duties(arg0 context.Context, arg1 *eth.DutiesRequest) (*eth.ValidatorDutiesContainer, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Duties", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ValidatorDutiesContainer)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// Duties indicates an expected call of Duties.
|
||||
func (mr *MockValidatorClientMockRecorder) Duties(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Duties", reflect.TypeOf((*MockValidatorClient)(nil).Duties), arg0, arg1)
|
||||
}
|
||||
|
||||
// EventStreamIsRunning mocks base method.
|
||||
func (m *MockValidatorClient) EventStreamIsRunning() bool {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "EventStreamIsRunning")
|
||||
ret0, _ := ret[0].(bool)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// EventStreamIsRunning indicates an expected call of EventStreamIsRunning.
|
||||
func (mr *MockValidatorClientMockRecorder) EventStreamIsRunning() *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "EventStreamIsRunning", reflect.TypeOf((*MockValidatorClient)(nil).EventStreamIsRunning))
|
||||
}
|
||||
|
||||
// FeeRecipientByPubKey mocks base method.
|
||||
func (m *MockValidatorClient) FeeRecipientByPubKey(arg0 context.Context, arg1 *eth.FeeRecipientByPubKeyRequest) (*eth.FeeRecipientByPubKeyResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "FeeRecipientByPubKey", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.FeeRecipientByPubKeyResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// FeeRecipientByPubKey indicates an expected call of FeeRecipientByPubKey.
|
||||
func (mr *MockValidatorClientMockRecorder) FeeRecipientByPubKey(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FeeRecipientByPubKey", reflect.TypeOf((*MockValidatorClient)(nil).FeeRecipientByPubKey), arg0, arg1)
|
||||
}
|
||||
|
||||
// Host mocks base method.
|
||||
func (m *MockValidatorClient) Host() string {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Host")
|
||||
ret0, _ := ret[0].(string)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// Host indicates an expected call of Host.
|
||||
func (mr *MockValidatorClientMockRecorder) Host() *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Host", reflect.TypeOf((*MockValidatorClient)(nil).Host))
|
||||
}
|
||||
|
||||
// MultipleValidatorStatus mocks base method.
|
||||
func (m *MockValidatorClient) MultipleValidatorStatus(arg0 context.Context, arg1 *eth.MultipleValidatorStatusRequest) (*eth.MultipleValidatorStatusResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "MultipleValidatorStatus", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.MultipleValidatorStatusResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// MultipleValidatorStatus indicates an expected call of MultipleValidatorStatus.
|
||||
func (mr *MockValidatorClientMockRecorder) MultipleValidatorStatus(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "MultipleValidatorStatus", reflect.TypeOf((*MockValidatorClient)(nil).MultipleValidatorStatus), arg0, arg1)
|
||||
}
|
||||
|
||||
// PrepareBeaconProposer mocks base method.
|
||||
func (m *MockValidatorClient) PrepareBeaconProposer(arg0 context.Context, arg1 *eth.PrepareBeaconProposerRequest) (*emptypb.Empty, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "PrepareBeaconProposer", arg0, arg1)
|
||||
ret0, _ := ret[0].(*emptypb.Empty)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// PrepareBeaconProposer indicates an expected call of PrepareBeaconProposer.
|
||||
func (mr *MockValidatorClientMockRecorder) PrepareBeaconProposer(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PrepareBeaconProposer", reflect.TypeOf((*MockValidatorClient)(nil).PrepareBeaconProposer), arg0, arg1)
|
||||
}
|
||||
|
||||
// ProposeAttestation mocks base method.
|
||||
func (m *MockValidatorClient) ProposeAttestation(arg0 context.Context, arg1 *eth.Attestation) (*eth.AttestResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ProposeAttestation", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.AttestResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ProposeAttestation indicates an expected call of ProposeAttestation.
|
||||
func (mr *MockValidatorClientMockRecorder) ProposeAttestation(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ProposeAttestation", reflect.TypeOf((*MockValidatorClient)(nil).ProposeAttestation), arg0, arg1)
|
||||
}
|
||||
|
||||
// ProposeAttestationElectra mocks base method.
|
||||
func (m *MockValidatorClient) ProposeAttestationElectra(arg0 context.Context, arg1 *eth.SingleAttestation) (*eth.AttestResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ProposeAttestationElectra", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.AttestResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ProposeAttestationElectra indicates an expected call of ProposeAttestationElectra.
|
||||
func (mr *MockValidatorClientMockRecorder) ProposeAttestationElectra(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ProposeAttestationElectra", reflect.TypeOf((*MockValidatorClient)(nil).ProposeAttestationElectra), arg0, arg1)
|
||||
}
|
||||
|
||||
// ProposeBeaconBlock mocks base method.
|
||||
func (m *MockValidatorClient) ProposeBeaconBlock(arg0 context.Context, arg1 *eth.GenericSignedBeaconBlock) (*eth.ProposeResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ProposeBeaconBlock", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ProposeResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ProposeBeaconBlock indicates an expected call of ProposeBeaconBlock.
|
||||
func (mr *MockValidatorClientMockRecorder) ProposeBeaconBlock(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ProposeBeaconBlock", reflect.TypeOf((*MockValidatorClient)(nil).ProposeBeaconBlock), arg0, arg1)
|
||||
}
|
||||
|
||||
// ProposeExit mocks base method.
|
||||
func (m *MockValidatorClient) ProposeExit(arg0 context.Context, arg1 *eth.SignedVoluntaryExit) (*eth.ProposeExitResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ProposeExit", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ProposeExitResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ProposeExit indicates an expected call of ProposeExit.
|
||||
func (mr *MockValidatorClientMockRecorder) ProposeExit(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ProposeExit", reflect.TypeOf((*MockValidatorClient)(nil).ProposeExit), arg0, arg1)
|
||||
}
|
||||
|
||||
// SetHost mocks base method.
|
||||
func (m *MockValidatorClient) SetHost(arg0 string) {
|
||||
m.ctrl.T.Helper()
|
||||
m.ctrl.Call(m, "SetHost", arg0)
|
||||
}
|
||||
|
||||
// SetHost indicates an expected call of SetHost.
|
||||
func (mr *MockValidatorClientMockRecorder) SetHost(arg0 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetHost", reflect.TypeOf((*MockValidatorClient)(nil).SetHost), arg0)
|
||||
}
|
||||
|
||||
// StartEventStream mocks base method.
|
||||
func (m *MockValidatorClient) StartEventStream(arg0 context.Context, arg1 []string, arg2 chan<- *event.Event) {
|
||||
m.ctrl.T.Helper()
|
||||
m.ctrl.Call(m, "StartEventStream", arg0, arg1, arg2)
|
||||
}
|
||||
|
||||
// StartEventStream indicates an expected call of StartEventStream.
|
||||
func (mr *MockValidatorClientMockRecorder) StartEventStream(arg0, arg1, arg2 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "StartEventStream", reflect.TypeOf((*MockValidatorClient)(nil).StartEventStream), arg0, arg1, arg2)
|
||||
}
|
||||
|
||||
// SubmitAggregateSelectionProof mocks base method.
|
||||
func (m *MockValidatorClient) SubmitAggregateSelectionProof(arg0 context.Context, arg1 *eth.AggregateSelectionRequest, arg2 primitives.ValidatorIndex, arg3 uint64) (*eth.AggregateSelectionResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SubmitAggregateSelectionProof", arg0, arg1, arg2, arg3)
|
||||
ret0, _ := ret[0].(*eth.AggregateSelectionResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SubmitAggregateSelectionProof indicates an expected call of SubmitAggregateSelectionProof.
|
||||
func (mr *MockValidatorClientMockRecorder) SubmitAggregateSelectionProof(arg0, arg1, arg2, arg3 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SubmitAggregateSelectionProof", reflect.TypeOf((*MockValidatorClient)(nil).SubmitAggregateSelectionProof), arg0, arg1, arg2, arg3)
|
||||
}
|
||||
|
||||
// SubmitAggregateSelectionProofElectra mocks base method.
|
||||
func (m *MockValidatorClient) SubmitAggregateSelectionProofElectra(arg0 context.Context, arg1 *eth.AggregateSelectionRequest, arg2 primitives.ValidatorIndex, arg3 uint64) (*eth.AggregateSelectionElectraResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SubmitAggregateSelectionProofElectra", arg0, arg1, arg2, arg3)
|
||||
ret0, _ := ret[0].(*eth.AggregateSelectionElectraResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SubmitAggregateSelectionProofElectra indicates an expected call of SubmitAggregateSelectionProofElectra.
|
||||
func (mr *MockValidatorClientMockRecorder) SubmitAggregateSelectionProofElectra(arg0, arg1, arg2, arg3 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SubmitAggregateSelectionProofElectra", reflect.TypeOf((*MockValidatorClient)(nil).SubmitAggregateSelectionProofElectra), arg0, arg1, arg2, arg3)
|
||||
}
|
||||
|
||||
// SubmitSignedAggregateSelectionProof mocks base method.
|
||||
func (m *MockValidatorClient) SubmitSignedAggregateSelectionProof(arg0 context.Context, arg1 *eth.SignedAggregateSubmitRequest) (*eth.SignedAggregateSubmitResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SubmitSignedAggregateSelectionProof", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.SignedAggregateSubmitResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SubmitSignedAggregateSelectionProof indicates an expected call of SubmitSignedAggregateSelectionProof.
|
||||
func (mr *MockValidatorClientMockRecorder) SubmitSignedAggregateSelectionProof(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SubmitSignedAggregateSelectionProof", reflect.TypeOf((*MockValidatorClient)(nil).SubmitSignedAggregateSelectionProof), arg0, arg1)
|
||||
}
|
||||
|
||||
// SubmitSignedAggregateSelectionProofElectra mocks base method.
|
||||
func (m *MockValidatorClient) SubmitSignedAggregateSelectionProofElectra(arg0 context.Context, arg1 *eth.SignedAggregateSubmitElectraRequest) (*eth.SignedAggregateSubmitResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SubmitSignedAggregateSelectionProofElectra", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.SignedAggregateSubmitResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SubmitSignedAggregateSelectionProofElectra indicates an expected call of SubmitSignedAggregateSelectionProofElectra.
|
||||
func (mr *MockValidatorClientMockRecorder) SubmitSignedAggregateSelectionProofElectra(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SubmitSignedAggregateSelectionProofElectra", reflect.TypeOf((*MockValidatorClient)(nil).SubmitSignedAggregateSelectionProofElectra), arg0, arg1)
|
||||
}
|
||||
|
||||
// SubmitSignedContributionAndProof mocks base method.
|
||||
func (m *MockValidatorClient) SubmitSignedContributionAndProof(arg0 context.Context, arg1 *eth.SignedContributionAndProof) (*emptypb.Empty, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SubmitSignedContributionAndProof", arg0, arg1)
|
||||
ret0, _ := ret[0].(*emptypb.Empty)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SubmitSignedContributionAndProof indicates an expected call of SubmitSignedContributionAndProof.
|
||||
func (mr *MockValidatorClientMockRecorder) SubmitSignedContributionAndProof(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SubmitSignedContributionAndProof", reflect.TypeOf((*MockValidatorClient)(nil).SubmitSignedContributionAndProof), arg0, arg1)
|
||||
}
|
||||
|
||||
// SubmitSyncMessage mocks base method.
|
||||
func (m *MockValidatorClient) SubmitSyncMessage(arg0 context.Context, arg1 *eth.SyncCommitteeMessage) (*emptypb.Empty, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SubmitSyncMessage", arg0, arg1)
|
||||
ret0, _ := ret[0].(*emptypb.Empty)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SubmitSyncMessage indicates an expected call of SubmitSyncMessage.
|
||||
func (mr *MockValidatorClientMockRecorder) SubmitSyncMessage(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SubmitSyncMessage", reflect.TypeOf((*MockValidatorClient)(nil).SubmitSyncMessage), arg0, arg1)
|
||||
}
|
||||
|
||||
// SubmitValidatorRegistrations mocks base method.
|
||||
func (m *MockValidatorClient) SubmitValidatorRegistrations(arg0 context.Context, arg1 *eth.SignedValidatorRegistrationsV1) (*emptypb.Empty, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SubmitValidatorRegistrations", arg0, arg1)
|
||||
ret0, _ := ret[0].(*emptypb.Empty)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SubmitValidatorRegistrations indicates an expected call of SubmitValidatorRegistrations.
|
||||
func (mr *MockValidatorClientMockRecorder) SubmitValidatorRegistrations(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SubmitValidatorRegistrations", reflect.TypeOf((*MockValidatorClient)(nil).SubmitValidatorRegistrations), arg0, arg1)
|
||||
}
|
||||
|
||||
// SubscribeCommitteeSubnets mocks base method.
|
||||
func (m *MockValidatorClient) SubscribeCommitteeSubnets(arg0 context.Context, arg1 *eth.CommitteeSubnetsSubscribeRequest, arg2 []*eth.ValidatorDuty) (*emptypb.Empty, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SubscribeCommitteeSubnets", arg0, arg1, arg2)
|
||||
ret0, _ := ret[0].(*emptypb.Empty)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SubscribeCommitteeSubnets indicates an expected call of SubscribeCommitteeSubnets.
|
||||
func (mr *MockValidatorClientMockRecorder) SubscribeCommitteeSubnets(arg0, arg1, arg2 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SubscribeCommitteeSubnets", reflect.TypeOf((*MockValidatorClient)(nil).SubscribeCommitteeSubnets), arg0, arg1, arg2)
|
||||
}
|
||||
|
||||
// SyncCommitteeContribution mocks base method.
|
||||
func (m *MockValidatorClient) SyncCommitteeContribution(arg0 context.Context, arg1 *eth.SyncCommitteeContributionRequest) (*eth.SyncCommitteeContribution, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SyncCommitteeContribution", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.SyncCommitteeContribution)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SyncCommitteeContribution indicates an expected call of SyncCommitteeContribution.
|
||||
func (mr *MockValidatorClientMockRecorder) SyncCommitteeContribution(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SyncCommitteeContribution", reflect.TypeOf((*MockValidatorClient)(nil).SyncCommitteeContribution), arg0, arg1)
|
||||
}
|
||||
|
||||
// SyncMessageBlockRoot mocks base method.
|
||||
func (m *MockValidatorClient) SyncMessageBlockRoot(arg0 context.Context, arg1 *emptypb.Empty) (*eth.SyncMessageBlockRootResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SyncMessageBlockRoot", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.SyncMessageBlockRootResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SyncMessageBlockRoot indicates an expected call of SyncMessageBlockRoot.
|
||||
func (mr *MockValidatorClientMockRecorder) SyncMessageBlockRoot(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SyncMessageBlockRoot", reflect.TypeOf((*MockValidatorClient)(nil).SyncMessageBlockRoot), arg0, arg1)
|
||||
}
|
||||
|
||||
// SyncSubcommitteeIndex mocks base method.
|
||||
func (m *MockValidatorClient) SyncSubcommitteeIndex(arg0 context.Context, arg1 *eth.SyncSubcommitteeIndexRequest) (*eth.SyncSubcommitteeIndexResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SyncSubcommitteeIndex", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.SyncSubcommitteeIndexResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// SyncSubcommitteeIndex indicates an expected call of SyncSubcommitteeIndex.
|
||||
func (mr *MockValidatorClientMockRecorder) SyncSubcommitteeIndex(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SyncSubcommitteeIndex", reflect.TypeOf((*MockValidatorClient)(nil).SyncSubcommitteeIndex), arg0, arg1)
|
||||
}
|
||||
|
||||
// ValidatorIndex mocks base method.
|
||||
func (m *MockValidatorClient) ValidatorIndex(arg0 context.Context, arg1 *eth.ValidatorIndexRequest) (*eth.ValidatorIndexResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ValidatorIndex", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ValidatorIndexResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ValidatorIndex indicates an expected call of ValidatorIndex.
|
||||
func (mr *MockValidatorClientMockRecorder) ValidatorIndex(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidatorIndex", reflect.TypeOf((*MockValidatorClient)(nil).ValidatorIndex), arg0, arg1)
|
||||
}
|
||||
|
||||
// ValidatorStatus mocks base method.
|
||||
func (m *MockValidatorClient) ValidatorStatus(arg0 context.Context, arg1 *eth.ValidatorStatusRequest) (*eth.ValidatorStatusResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ValidatorStatus", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ValidatorStatusResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ValidatorStatus indicates an expected call of ValidatorStatus.
|
||||
func (mr *MockValidatorClientMockRecorder) ValidatorStatus(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidatorStatus", reflect.TypeOf((*MockValidatorClient)(nil).ValidatorStatus), arg0, arg1)
|
||||
}
|
||||
|
||||
// WaitForChainStart mocks base method.
|
||||
func (m *MockValidatorClient) WaitForChainStart(arg0 context.Context, arg1 *emptypb.Empty) (*eth.ChainStartResponse, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "WaitForChainStart", arg0, arg1)
|
||||
ret0, _ := ret[0].(*eth.ChainStartResponse)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// WaitForChainStart indicates an expected call of WaitForChainStart.
|
||||
func (mr *MockValidatorClientMockRecorder) WaitForChainStart(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WaitForChainStart", reflect.TypeOf((*MockValidatorClient)(nil).WaitForChainStart), arg0, arg1)
|
||||
}
@@ -1,44 +0,0 @@
load("@prysm//tools/go:def.bzl", "go_library", "go_test")

go_library(
    name = "go_default_library",
    srcs = [
        "client.go",
        "grpc_node_client.go",
        "interfaces.go",
        "rest_client.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon/node",
    visibility = ["//visibility:public"],
    deps = [
        "//api/client:go_default_library",
        "//api/client/beacon/health:go_default_library",
        "//api/client/beacon/shared_providers:go_default_library",
        "//api/server/structs:go_default_library",
        "//config/features:go_default_library",
        "//proto/prysm/v1alpha1:go_default_library",
        "//validator/helpers:go_default_library",
        "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
        "@com_github_golang_protobuf//ptypes/empty",
        "@com_github_pkg_errors//:go_default_library",
        "@com_github_sirupsen_logrus//:go_default_library",
        "@org_golang_google_grpc//:go_default_library",
        "@org_golang_google_protobuf//types/known/timestamppb:go_default_library",
    ],
)

go_test(
    name = "go_default_test",
    srcs = ["rest_client_test.go"],
    embed = [":go_default_library"],
    deps = [
        "//api/client/beacon/mock:go_default_library",
        "//api/server/structs:go_default_library",
        "//proto/prysm/v1alpha1:go_default_library",
        "//testing/assert:go_default_library",
        "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
        "@org_golang_google_protobuf//types/known/emptypb:go_default_library",
        "@org_golang_google_protobuf//types/known/timestamppb:go_default_library",
        "@org_uber_go_mock//gomock:go_default_library",
    ],
)
@@ -1,28 +0,0 @@
package node

import (
	"github.com/prysmaticlabs/prysm/v5/api/client"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/health"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/shared_providers"
	"github.com/prysmaticlabs/prysm/v5/config/features"
	validatorHelpers "github.com/prysmaticlabs/prysm/v5/validator/helpers"
)

func NewClient(validatorConn validatorHelpers.NodeConnection, jsonRestHandler client.JsonRestHandler) Client {
	grpcClient := NewNodeClient(validatorConn.GetGrpcClientConn())
	if features.Get().EnableBeaconRESTApi {
		return NewNodeClientWithFallback(jsonRestHandler, grpcClient)
	} else {
		return grpcClient
	}
}

func NewNodeClientWithFallback(jsonRestHandler client.JsonRestHandler, fallbackClient Client) Client {
	b := &beaconapiNodeClient{
		jsonRestHandler: jsonRestHandler,
		fallbackClient:  fallbackClient,
		genesisProvider: shared_providers.NewGenesis(jsonRestHandler),
	}
	b.healthTracker = health.NewTracker(b)
	return b
}
@@ -1,55 +0,0 @@
package node

import (
	"context"

	"github.com/golang/protobuf/ptypes/empty"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/health"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
	log "github.com/sirupsen/logrus"
	"google.golang.org/grpc"
)

var (
	_ = Client(&grpcNodeClient{})
)

type grpcNodeClient struct {
	nodeClient    ethpb.NodeClient
	healthTracker health.HealthTracker
}

func (c *grpcNodeClient) SyncStatus(ctx context.Context, in *empty.Empty) (*ethpb.SyncStatus, error) {
	return c.nodeClient.GetSyncStatus(ctx, in)
}

func (c *grpcNodeClient) Genesis(ctx context.Context, in *empty.Empty) (*ethpb.Genesis, error) {
	return c.nodeClient.GetGenesis(ctx, in)
}

func (c *grpcNodeClient) Version(ctx context.Context, in *empty.Empty) (*ethpb.Version, error) {
	return c.nodeClient.GetVersion(ctx, in)
}

func (c *grpcNodeClient) Peers(ctx context.Context, in *empty.Empty) (*ethpb.Peers, error) {
	return c.nodeClient.ListPeers(ctx, in)
}

func (c *grpcNodeClient) IsHealthy(ctx context.Context) bool {
	_, err := c.nodeClient.GetHealth(ctx, &ethpb.HealthRequest{})
	if err != nil {
		log.WithError(err).Debug("failed to get health of node")
		return false
	}
	return true
}

func (c *grpcNodeClient) HealthTracker() health.HealthTracker {
	return c.healthTracker
}

func NewNodeClient(cc grpc.ClientConnInterface) Client {
	g := &grpcNodeClient{nodeClient: ethpb.NewNodeClient(cc)}
	g.healthTracker = health.NewTracker(g)
	return g
}
@@ -1,17 +0,0 @@
package node

import (
	"context"

	"github.com/golang/protobuf/ptypes/empty"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/health"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

type Client interface {
	SyncStatus(ctx context.Context, in *empty.Empty) (*ethpb.SyncStatus, error)
	Genesis(ctx context.Context, in *empty.Empty) (*ethpb.Genesis, error)
	Version(ctx context.Context, in *empty.Empty) (*ethpb.Version, error)
	Peers(ctx context.Context, in *empty.Empty) (*ethpb.Peers, error)
	HealthTracker() health.HealthTracker
}
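
As an informal illustration (not part of this diff), downstream code can program against the Client interface above without caring whether the backing implementation is gRPC or REST; the helper name and package below are hypothetical:

package example

import (
	"context"
	"fmt"

	"github.com/golang/protobuf/ptypes/empty"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/node"
)

// printSyncStatus is a hypothetical helper: it accepts any node.Client,
// so it works with both the gRPC-backed and REST-backed implementations.
func printSyncStatus(ctx context.Context, c node.Client) error {
	status, err := c.SyncStatus(ctx, &empty.Empty{})
	if err != nil {
		return err
	}
	fmt.Printf("beacon node syncing: %v\n", status.Syncing)
	return nil
}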
@@ -1,113 +0,0 @@
package node

import (
	"context"
	"strconv"

	"github.com/ethereum/go-ethereum/common/hexutil"
	"github.com/golang/protobuf/ptypes/empty"
	"github.com/pkg/errors"
	"github.com/prysmaticlabs/prysm/v5/api/client"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/health"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/shared_providers"
	"github.com/prysmaticlabs/prysm/v5/api/server/structs"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
	"google.golang.org/protobuf/types/known/timestamppb"
)

var (
	_ = Client(&beaconapiNodeClient{})
)

type beaconapiNodeClient struct {
	fallbackClient  Client
	jsonRestHandler client.JsonRestHandler
	genesisProvider shared_providers.Genesis
	healthTracker   health.HealthTracker
}

func (c *beaconapiNodeClient) SyncStatus(ctx context.Context, _ *empty.Empty) (*ethpb.SyncStatus, error) {
	syncingResponse := structs.SyncStatusResponse{}
	if err := c.jsonRestHandler.Get(ctx, "/eth/v1/node/syncing", &syncingResponse); err != nil {
		return nil, err
	}

	if syncingResponse.Data == nil {
		return nil, errors.New("syncing data is nil")
	}

	return &ethpb.SyncStatus{
		Syncing: syncingResponse.Data.IsSyncing,
	}, nil
}

func (c *beaconapiNodeClient) Genesis(ctx context.Context, _ *empty.Empty) (*ethpb.Genesis, error) {
	genesisJson, err := c.genesisProvider.Genesis(ctx)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get genesis")
	}

	genesisValidatorRoot, err := hexutil.Decode(genesisJson.GenesisValidatorsRoot)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode genesis validator root `%s`", genesisJson.GenesisValidatorsRoot)
	}

	genesisTime, err := strconv.ParseInt(genesisJson.GenesisTime, 10, 64)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse genesis time `%s`", genesisJson.GenesisTime)
	}

	depositContractJson := structs.GetDepositContractResponse{}
	if err = c.jsonRestHandler.Get(ctx, "/eth/v1/config/deposit_contract", &depositContractJson); err != nil {
		return nil, err
	}

	if depositContractJson.Data == nil {
		return nil, errors.New("deposit contract data is nil")
	}

	depositContactAddress, err := hexutil.Decode(depositContractJson.Data.Address)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode deposit contract address `%s`", depositContractJson.Data.Address)
	}

	return &ethpb.Genesis{
		GenesisTime: &timestamppb.Timestamp{
			Seconds: genesisTime,
		},
		DepositContractAddress: depositContactAddress,
		GenesisValidatorsRoot:  genesisValidatorRoot,
	}, nil
}

func (c *beaconapiNodeClient) Version(ctx context.Context, _ *empty.Empty) (*ethpb.Version, error) {
	var versionResponse structs.GetVersionResponse
	if err := c.jsonRestHandler.Get(ctx, "/eth/v1/node/version", &versionResponse); err != nil {
		return nil, err
	}

	if versionResponse.Data == nil || versionResponse.Data.Version == "" {
		return nil, errors.New("empty version response")
	}

	return &ethpb.Version{
		Version: versionResponse.Data.Version,
	}, nil
}

func (c *beaconapiNodeClient) Peers(ctx context.Context, in *empty.Empty) (*ethpb.Peers, error) {
	if c.fallbackClient != nil {
		return c.fallbackClient.Peers(ctx, in)
	}

	// TODO: Implement me
	return nil, errors.New("beaconapiNodeClient.Peers is not implemented. To use a fallback client, pass a fallback client as the last argument of NewBeaconApiNodeClientWithFallback.")
}

func (c *beaconapiNodeClient) IsHealthy(ctx context.Context) bool {
	return c.jsonRestHandler.Get(ctx, "/eth/v1/node/health", nil) == nil
}

func (c *beaconapiNodeClient) HealthTracker() health.HealthTracker {
	return c.healthTracker
}
@@ -1,290 +0,0 @@
|
||||
package node
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"go.uber.org/mock/gomock"
|
||||
"google.golang.org/protobuf/types/known/emptypb"
|
||||
"google.golang.org/protobuf/types/known/timestamppb"
|
||||
)
|
||||
|
||||
func TestGetGenesis(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
genesisResponse *structs.Genesis
|
||||
genesisError error
|
||||
depositContractResponse structs.GetDepositContractResponse
|
||||
depositContractError error
|
||||
queriesDepositContract bool
|
||||
expectedResponse *ethpb.Genesis
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "fails to get genesis",
|
||||
genesisError: errors.New("foo error"),
|
||||
expectedError: "failed to get genesis: foo error",
|
||||
},
|
||||
{
|
||||
name: "fails to decode genesis validator root",
|
||||
genesisResponse: &structs.Genesis{
|
||||
GenesisTime: "1",
|
||||
GenesisValidatorsRoot: "foo",
|
||||
},
|
||||
expectedError: "failed to decode genesis validator root `foo`",
|
||||
},
|
||||
{
|
||||
name: "fails to parse genesis time",
|
||||
genesisResponse: &structs.Genesis{
|
||||
GenesisTime: "foo",
|
||||
GenesisValidatorsRoot: hexutil.Encode([]byte{1}),
|
||||
},
|
||||
expectedError: "failed to parse genesis time `foo`",
|
||||
},
|
||||
{
|
||||
name: "fails to query contract information",
|
||||
genesisResponse: &structs.Genesis{
|
||||
GenesisTime: "1",
|
||||
GenesisValidatorsRoot: hexutil.Encode([]byte{2}),
|
||||
},
|
||||
depositContractError: errors.New("foo error"),
|
||||
queriesDepositContract: true,
|
||||
expectedError: "foo error",
|
||||
},
|
||||
{
|
||||
name: "fails to read nil deposit contract data",
|
||||
genesisResponse: &structs.Genesis{
|
||||
GenesisTime: "1",
|
||||
GenesisValidatorsRoot: hexutil.Encode([]byte{2}),
|
||||
},
|
||||
queriesDepositContract: true,
|
||||
depositContractResponse: structs.GetDepositContractResponse{
|
||||
Data: nil,
|
||||
},
|
||||
expectedError: "deposit contract data is nil",
|
||||
},
|
||||
{
|
||||
name: "fails to decode deposit contract address",
|
||||
genesisResponse: &structs.Genesis{
|
||||
GenesisTime: "1",
|
||||
GenesisValidatorsRoot: hexutil.Encode([]byte{2}),
|
||||
},
|
||||
queriesDepositContract: true,
|
||||
depositContractResponse: structs.GetDepositContractResponse{
|
||||
Data: &structs.DepositContractData{
|
||||
Address: "foo",
|
||||
},
|
||||
},
|
||||
expectedError: "failed to decode deposit contract address `foo`",
|
||||
},
|
||||
{
|
||||
name: "successfully retrieves genesis info",
|
||||
genesisResponse: &structs.Genesis{
|
||||
GenesisTime: "654812",
|
||||
GenesisValidatorsRoot: hexutil.Encode([]byte{2}),
|
||||
},
|
||||
queriesDepositContract: true,
|
||||
depositContractResponse: structs.GetDepositContractResponse{
|
||||
Data: &structs.DepositContractData{
|
||||
Address: hexutil.Encode([]byte{3}),
|
||||
},
|
||||
},
|
||||
expectedResponse: ðpb.Genesis{
|
||||
GenesisTime: ×tamppb.Timestamp{
|
||||
Seconds: 654812,
|
||||
},
|
||||
DepositContractAddress: []byte{3},
|
||||
GenesisValidatorsRoot: []byte{2},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
genesisProvider := mock.NewMockGenesisProvider(ctrl)
|
||||
genesisProvider.EXPECT().Genesis(
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
testCase.genesisResponse,
|
||||
testCase.genesisError,
|
||||
)
|
||||
|
||||
depositContractJson := structs.GetDepositContractResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
if testCase.queriesDepositContract {
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
"/eth/v1/config/deposit_contract",
|
||||
&depositContractJson,
|
||||
).Return(
|
||||
testCase.depositContractError,
|
||||
).SetArg(
|
||||
2,
|
||||
testCase.depositContractResponse,
|
||||
)
|
||||
}
|
||||
|
||||
nodeClient := &beaconapiNodeClient{
|
||||
genesisProvider: genesisProvider,
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
response, err := nodeClient.Genesis(ctx, &emptypb.Empty{})
|
||||
|
||||
if testCase.expectedResponse == nil {
|
||||
assert.ErrorContains(t, testCase.expectedError, err)
|
||||
} else {
|
||||
assert.DeepEqual(t, testCase.expectedResponse, response)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetSyncStatus(t *testing.T) {
|
||||
const syncingEndpoint = "/eth/v1/node/syncing"
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
restEndpointResponse structs.SyncStatusResponse
|
||||
restEndpointError error
|
||||
expectedResponse *ethpb.SyncStatus
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "fails to query REST endpoint",
|
||||
restEndpointError: errors.New("foo error"),
|
||||
expectedError: "foo error",
|
||||
},
|
||||
{
|
||||
name: "returns nil syncing data",
|
||||
restEndpointResponse: structs.SyncStatusResponse{Data: nil},
|
||||
expectedError: "syncing data is nil",
|
||||
},
|
||||
{
|
||||
name: "returns false syncing status",
|
||||
restEndpointResponse: structs.SyncStatusResponse{
|
||||
Data: &structs.SyncStatusResponseData{
|
||||
IsSyncing: false,
|
||||
},
|
||||
},
|
||||
expectedResponse: ðpb.SyncStatus{
|
||||
Syncing: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "returns true syncing status",
|
||||
restEndpointResponse: structs.SyncStatusResponse{
|
||||
Data: &structs.SyncStatusResponseData{
|
||||
IsSyncing: true,
|
||||
},
|
||||
},
|
||||
expectedResponse: ðpb.SyncStatus{
|
||||
Syncing: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
syncingResponse := structs.SyncStatusResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
syncingEndpoint,
|
||||
&syncingResponse,
|
||||
).Return(
|
||||
testCase.restEndpointError,
|
||||
).SetArg(
|
||||
2,
|
||||
testCase.restEndpointResponse,
|
||||
)
|
||||
|
||||
nodeClient := &beaconapiNodeClient{jsonRestHandler: jsonRestHandler}
|
||||
syncStatus, err := nodeClient.SyncStatus(ctx, &emptypb.Empty{})
|
||||
|
||||
if testCase.expectedResponse == nil {
|
||||
assert.ErrorContains(t, testCase.expectedError, err)
|
||||
} else {
|
||||
assert.DeepEqual(t, testCase.expectedResponse, syncStatus)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetVersion(t *testing.T) {
|
||||
const versionEndpoint = "/eth/v1/node/version"
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
restEndpointResponse structs.GetVersionResponse
|
||||
restEndpointError error
|
||||
expectedResponse *ethpb.Version
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "fails to query REST endpoint",
|
||||
restEndpointError: errors.New("foo error"),
|
||||
expectedError: "foo error",
|
||||
},
|
||||
{
|
||||
name: "returns nil version data",
|
||||
restEndpointResponse: structs.GetVersionResponse{Data: nil},
|
||||
expectedError: "empty version response",
|
||||
},
|
||||
{
|
||||
name: "returns proper version response",
|
||||
restEndpointResponse: structs.GetVersionResponse{
|
||||
Data: &structs.Version{
|
||||
Version: "prysm/local",
|
||||
},
|
||||
},
|
||||
expectedResponse: ðpb.Version{
|
||||
Version: "prysm/local",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
ctx := context.Background()
|
||||
|
||||
var versionResponse structs.GetVersionResponse
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
versionEndpoint,
|
||||
&versionResponse,
|
||||
).Return(
|
||||
testCase.restEndpointError,
|
||||
).SetArg(
|
||||
2,
|
||||
testCase.restEndpointResponse,
|
||||
)
|
||||
|
||||
nodeClient := &beaconapiNodeClient{jsonRestHandler: jsonRestHandler}
|
||||
version, err := nodeClient.Version(ctx, &emptypb.Empty{})
|
||||
|
||||
if testCase.expectedResponse == nil {
|
||||
assert.ErrorContains(t, testCase.expectedError, err)
|
||||
} else {
|
||||
assert.DeepEqual(t, testCase.expectedResponse, version)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,55 +0,0 @@
load("@prysm//tools/go:def.bzl", "go_library", "go_test")

go_library(
    name = "go_default_library",
    srcs = [
        "client.go",
        "grpc_client.go",
        "interfaces.go",
        "rest_client.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon/prysm_api",
    visibility = ["//visibility:public"],
    deps = [
        "//api/client:go_default_library",
        "//api/client/apiutil:go_default_library",
        "//api/client/beacon:go_default_library",
        "//api/client/beacon/chain:go_default_library",
        "//api/client/beacon/node:go_default_library",
        "//api/server/structs:go_default_library",
        "//beacon-chain/rpc/eth/helpers:go_default_library",
        "//beacon-chain/state/state-native:go_default_library",
        "//config/features:go_default_library",
        "//consensus-types/primitives:go_default_library",
        "//consensus-types/validator:go_default_library",
        "//proto/eth/v1:go_default_library",
        "//proto/prysm/v1alpha1:go_default_library",
        "//validator/helpers:go_default_library",
        "@com_github_golang_protobuf//ptypes/empty",
        "@com_github_pkg_errors//:go_default_library",
        "@org_golang_google_grpc//:go_default_library",
    ],
)

go_test(
    name = "go_default_test",
    srcs = [
        "grpc_client_test.go",
        "rest_client_test.go",
    ],
    embed = [":go_default_library"],
    deps = [
        "//api/client/beacon:go_default_library",
        "//api/client/beacon/mock:go_default_library",
        "//api/client/beacon/node:go_default_library",
        "//api/server/structs:go_default_library",
        "//config/params:go_default_library",
        "//consensus-types/primitives:go_default_library",
        "//consensus-types/validator:go_default_library",
        "//encoding/bytesutil:go_default_library",
        "//proto/prysm/v1alpha1:go_default_library",
        "//testing/require:go_default_library",
        "//testing/util:go_default_library",
        "@org_uber_go_mock//gomock:go_default_library",
    ],
)
@@ -1,30 +0,0 @@
package prysm_api

import (
	"github.com/prysmaticlabs/prysm/v5/api/client"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/chain"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/node"
	"github.com/prysmaticlabs/prysm/v5/config/features"
	validatorHelpers "github.com/prysmaticlabs/prysm/v5/validator/helpers"
	"google.golang.org/grpc"
)

func NewClient(validatorConn validatorHelpers.NodeConnection, jsonRestHandler client.JsonRestHandler) Client {
	if features.Get().EnableBeaconRESTApi {
		return NewPrysmChainRestClient(jsonRestHandler, node.NewClient(validatorConn, jsonRestHandler))
	} else {
		return NewGrpcPrysmChainClient(validatorConn.GetGrpcClientConn())
	}
}

// NewPrysmChainRestClient returns an implementation of Client.
func NewPrysmChainRestClient(jsonRestHandler client.JsonRestHandler, nodeClient node.Client) Client {
	return prysmChainClient{
		jsonRestHandler: jsonRestHandler,
		nodeClient:      nodeClient,
	}
}

func NewGrpcPrysmChainClient(cc grpc.ClientConnInterface) Client {
	return &grpcPrysmChainClient{chainClient: chain.NewGrpcChainClient(cc)}
}
@@ -1,96 +0,0 @@
package prysm_api

import (
	"context"
	"fmt"
	"sort"

	"github.com/golang/protobuf/ptypes/empty"
	"github.com/pkg/errors"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/chain"
	"github.com/prysmaticlabs/prysm/v5/beacon-chain/rpc/eth/helpers"
	statenative "github.com/prysmaticlabs/prysm/v5/beacon-chain/state/state-native"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/validator"
	eth "github.com/prysmaticlabs/prysm/v5/proto/eth/v1"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

type grpcPrysmChainClient struct {
	chainClient chain.Client
}

func (g grpcPrysmChainClient) ValidatorCount(ctx context.Context, _ string, statuses []validator.Status) ([]beacon.ValidatorCount, error) {
	resp, err := g.chainClient.Validators(ctx, &ethpb.ListValidatorsRequest{PageSize: 0})
	if err != nil {
		return nil, errors.Wrap(err, "list validators failed")
	}

	var vals []*ethpb.Validator
	for _, val := range resp.ValidatorList {
		vals = append(vals, val.Validator)
	}

	head, err := g.chainClient.ChainHead(ctx, &empty.Empty{})
	if err != nil {
		return nil, errors.Wrap(err, "get chain head")
	}

	if len(statuses) == 0 {
		for _, val := range eth.ValidatorStatus_value {
			statuses = append(statuses, validator.Status(val))
		}
	}

	valCount, err := validatorCountByStatus(vals, statuses, head.HeadEpoch)
	if err != nil {
		return nil, errors.Wrap(err, "validator count by status")
	}

	return valCount, nil
}

// validatorCountByStatus returns a slice of validator counts, one for each status, in the given epoch.
func validatorCountByStatus(validators []*ethpb.Validator, statuses []validator.Status, epoch primitives.Epoch) ([]beacon.ValidatorCount, error) {
	countByStatus := make(map[validator.Status]uint64)
	for _, val := range validators {
		readOnlyVal, err := statenative.NewValidator(val)
		if err != nil {
			return nil, fmt.Errorf("could not convert validator: %w", err)
		}
		valStatus, err := helpers.ValidatorStatus(readOnlyVal, epoch)
		if err != nil {
			return nil, fmt.Errorf("could not get validator status: %w", err)
		}
		valSubStatus, err := helpers.ValidatorSubStatus(readOnlyVal, epoch)
		if err != nil {
			return nil, fmt.Errorf("could not get validator sub status: %w", err)
		}

		for _, status := range statuses {
			if valStatus == status || valSubStatus == status {
				countByStatus[status]++
			}
		}
	}

	var resp []beacon.ValidatorCount
	for status, count := range countByStatus {
		resp = append(resp, beacon.ValidatorCount{
			Status: status.String(),
			Count:  count,
		})
	}

	// Sort the response slice according to status strings for deterministic ordering of the validator count response.
	sort.Slice(resp, func(i, j int) bool {
		return resp[i].Status < resp[j].Status
	})

	return resp, nil
}

func (c *grpcPrysmChainClient) ValidatorPerformance(ctx context.Context, in *ethpb.ValidatorPerformanceRequest) (*ethpb.ValidatorPerformanceResponse, error) {
	return c.chainClient.ValidatorPerformance(ctx, in)
}
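
As an informal sketch of how the generated PrysmChainClient mock from the top of this diff is intended to be used in tests (not part of this diff; the mock import path is assumed from the BUILD files above, and the expectation values are made up):

package example

import (
	"context"
	"testing"

	"github.com/prysmaticlabs/prysm/v5/api/client/beacon"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/validator"
	"go.uber.org/mock/gomock"
)

// TestValidatorCountStub is a hypothetical example of stubbing
// PrysmChainClient.ValidatorCount with the generated gomock mock.
func TestValidatorCountStub(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	client := mock.NewMockPrysmChainClient(ctrl)
	client.EXPECT().ValidatorCount(
		gomock.Any(), "head", gomock.Any(),
	).Return([]beacon.ValidatorCount{{Status: "active", Count: 2}}, nil)

	counts, err := client.ValidatorCount(context.Background(), "head", []validator.Status{})
	if err != nil || len(counts) != 1 || counts[0].Count != 2 {
		t.Fatalf("unexpected result: %v, %v", counts, err)
	}
}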
@@ -1,326 +0,0 @@
|
||||
package prysm_api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock"
|
||||
"github.com/prysmaticlabs/prysm/v5/config/params"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/validator"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/util"
|
||||
"go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
func TestGRPC_GetValidatorCount(t *testing.T) {
|
||||
st, _ := util.DeterministicGenesisState(t, 10)
|
||||
farFutureEpoch := params.BeaconConfig().FarFutureEpoch
|
||||
validators := []*ethpb.Validator{
|
||||
// Pending initialized.
|
||||
{
|
||||
ActivationEpoch: farFutureEpoch,
|
||||
ActivationEligibilityEpoch: farFutureEpoch,
|
||||
ExitEpoch: farFutureEpoch,
|
||||
WithdrawableEpoch: farFutureEpoch,
|
||||
},
|
||||
// Pending queued.
|
||||
{
|
||||
ActivationEpoch: 10,
|
||||
ActivationEligibilityEpoch: 4,
|
||||
ExitEpoch: farFutureEpoch,
|
||||
WithdrawableEpoch: farFutureEpoch,
|
||||
},
|
||||
// Active ongoing.
|
||||
{
|
||||
ActivationEpoch: 0,
|
||||
ExitEpoch: farFutureEpoch,
|
||||
},
|
||||
// Active slashed.
|
||||
{
|
||||
ActivationEpoch: 0,
|
||||
ExitEpoch: 30,
|
||||
Slashed: true,
|
||||
WithdrawableEpoch: 50,
|
||||
},
|
||||
// Active exiting.
|
||||
{
|
||||
ActivationEpoch: 0,
|
||||
ExitEpoch: 30,
|
||||
Slashed: false,
|
||||
WithdrawableEpoch: 50,
|
||||
},
|
||||
// Exit slashed (at epoch 35).
|
||||
{
|
||||
ActivationEpoch: 3,
|
||||
ExitEpoch: 30,
|
||||
WithdrawableEpoch: 50,
|
||||
Slashed: true,
|
||||
},
|
||||
// Exit unslashed (at epoch 35).
|
||||
{
|
||||
ActivationEpoch: 3,
|
||||
ExitEpoch: 30,
|
||||
WithdrawableEpoch: 50,
|
||||
Slashed: false,
|
||||
},
|
||||
// Withdrawable (at epoch 45).
|
||||
{
|
||||
ActivationEpoch: 3,
|
||||
ExitEpoch: 30,
|
||||
WithdrawableEpoch: 40,
|
||||
EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance,
|
||||
Slashed: false,
|
||||
},
|
||||
// Withdrawal done (at epoch 45).
|
||||
{
|
||||
ActivationEpoch: 3,
|
||||
ExitEpoch: 30,
|
||||
WithdrawableEpoch: 40,
|
||||
EffectiveBalance: 0,
|
||||
Slashed: false,
|
||||
},
|
||||
}
|
||||
for _, v := range validators {
|
||||
require.NoError(t, st.AppendValidator(v))
|
||||
require.NoError(t, st.AppendBalance(params.BeaconConfig().MaxEffectiveBalance))
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
statuses []string
|
||||
currentEpoch int
|
||||
expectedResponse []beacon.ValidatorCount
|
||||
}{
|
||||
{
|
||||
name: "Head count active validators",
|
||||
statuses: []string{"active"},
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "active",
|
||||
Count: 13,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count active ongoing validators",
|
||||
statuses: []string{"active_ongoing"},
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "active_ongoing",
|
||||
Count: 11,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count active exiting validators",
|
||||
statuses: []string{"active_exiting"},
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "active_exiting",
|
||||
Count: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count active slashed validators",
|
||||
statuses: []string{"active_slashed"},
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "active_slashed",
|
||||
Count: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count pending validators",
|
||||
statuses: []string{"pending"},
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "pending",
|
||||
Count: 6,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count pending initialized validators",
|
||||
statuses: []string{"pending_initialized"},
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "pending_initialized",
|
||||
Count: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count pending queued validators",
|
||||
statuses: []string{"pending_queued"},
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "pending_queued",
|
||||
Count: 5,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count exited validators",
|
||||
statuses: []string{"exited"},
|
||||
currentEpoch: 35,
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "exited",
|
||||
Count: 6,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count exited slashed validators",
|
||||
statuses: []string{"exited_slashed"},
|
||||
currentEpoch: 35,
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "exited_slashed",
|
||||
Count: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count exited unslashed validators",
|
||||
statuses: []string{"exited_unslashed"},
|
||||
currentEpoch: 35,
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "exited_unslashed",
|
||||
Count: 4,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count withdrawal validators",
|
||||
statuses: []string{"withdrawal"},
|
||||
currentEpoch: 45,
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "withdrawal",
|
||||
Count: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count withdrawal possible validators",
|
||||
statuses: []string{"withdrawal_possible"},
|
||||
currentEpoch: 45,
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "withdrawal_possible",
|
||||
Count: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count withdrawal done validators",
|
||||
statuses: []string{"withdrawal_done"},
|
||||
currentEpoch: 45,
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "withdrawal_done",
|
||||
Count: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count active and pending validators",
|
||||
statuses: []string{"active", "pending"},
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "active",
|
||||
Count: 13,
|
||||
},
|
||||
{
|
||||
Status: "pending",
|
||||
Count: 6,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Head count of ALL validators",
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "active",
|
||||
Count: 13,
|
||||
},
|
||||
{
|
||||
Status: "active_exiting",
|
||||
Count: 1,
|
||||
},
|
||||
{
|
||||
Status: "active_ongoing",
|
||||
Count: 11,
|
||||
},
|
||||
{
|
||||
Status: "active_slashed",
|
||||
Count: 1,
|
||||
},
|
||||
{
|
||||
Status: "pending",
|
||||
Count: 6,
|
||||
},
|
||||
{
|
||||
Status: "pending_initialized",
|
||||
Count: 1,
|
||||
},
|
||||
{
|
||||
Status: "pending_queued",
|
||||
Count: 5,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
listValidatorResp := &ethpb.Validators{}
|
||||
for _, val := range st.Validators() {
|
||||
listValidatorResp.ValidatorList = append(listValidatorResp.ValidatorList, &ethpb.Validators_ValidatorContainer{
|
||||
Validator: val,
|
||||
})
|
||||
}
|
||||
|
||||
chainClient := mock.NewMockChainClient(ctrl)
|
||||
chainClient.EXPECT().Validators(
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
listValidatorResp,
|
||||
nil,
|
||||
)
|
||||
|
||||
chainClient.EXPECT().ChainHead(
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
&ethpb.ChainHead{HeadEpoch: primitives.Epoch(test.currentEpoch)},
|
||||
nil,
|
||||
)
|
||||
|
||||
prysmBeaconChainClient := &grpcPrysmChainClient{
|
||||
chainClient: chainClient,
|
||||
}
|
||||
|
||||
var statuses []validator.Status
|
||||
for _, status := range test.statuses {
|
||||
ok, valStatus := validator.StatusFromString(status)
|
||||
require.Equal(t, true, ok)
|
||||
statuses = append(statuses, valStatus)
|
||||
}
|
||||
vcCountResp, err := prysmBeaconChainClient.ValidatorCount(context.Background(), "", statuses)
|
||||
require.NoError(t, err)
|
||||
require.DeepEqual(t, test.expectedResponse, vcCountResp)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,15 +0,0 @@
package prysm_api

import (
	"context"

	"github.com/prysmaticlabs/prysm/v5/api/client/beacon"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/validator"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

// Client defines the interface required to implement all of the Prysm-specific custom endpoints.
type Client interface {
	ValidatorCount(context.Context, string, []validator.Status) ([]beacon.ValidatorCount, error)
	ValidatorPerformance(context.Context, *ethpb.ValidatorPerformanceRequest) (*ethpb.ValidatorPerformanceResponse, error)
}
@@ -1,108 +0,0 @@
package prysm_api

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	neturl "net/url"
	"strconv"
	"strings"

	"github.com/pkg/errors"
	"github.com/prysmaticlabs/prysm/v5/api/apiutil"
	"github.com/prysmaticlabs/prysm/v5/api/client"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon"
	"github.com/prysmaticlabs/prysm/v5/api/client/beacon/node"
	"github.com/prysmaticlabs/prysm/v5/api/server/structs"
	validator2 "github.com/prysmaticlabs/prysm/v5/consensus-types/validator"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

type prysmChainClient struct {
	jsonRestHandler client.JsonRestHandler
	nodeClient node.Client
}

func (c prysmChainClient) ValidatorCount(ctx context.Context, stateID string, statuses []validator2.Status) ([]beacon.ValidatorCount, error) {
	// Check the node version first, since validator count is a Prysm-specific endpoint.
	nodeVersion, err := c.nodeClient.Version(ctx, nil)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get node version")
	}

	if !strings.Contains(strings.ToLower(nodeVersion.Version), "prysm") {
		return nil, client.ErrNotSupported
	}

	queryParams := neturl.Values{}
	for _, status := range statuses {
		queryParams.Add("status", status.String())
	}

	queryUrl := apiutil.BuildURL(fmt.Sprintf("/eth/v1/beacon/states/%s/validator_count", stateID), queryParams)

	var validatorCountResponse structs.GetValidatorCountResponse
	if err = c.jsonRestHandler.Get(ctx, queryUrl, &validatorCountResponse); err != nil {
		return nil, err
	}

	if validatorCountResponse.Data == nil {
		return nil, errors.New("validator count data is nil")
	}

	if len(statuses) != 0 && len(statuses) != len(validatorCountResponse.Data) {
		return nil, errors.New("mismatch between validator count data and the number of statuses provided")
	}

	var resp []beacon.ValidatorCount
	for _, vc := range validatorCountResponse.Data {
		count, err := strconv.ParseUint(vc.Count, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse validator count %s", vc.Count)
		}

		resp = append(resp, beacon.ValidatorCount{
			Status: vc.Status,
			Count: count,
		})
	}

	return resp, nil
}

func (c prysmChainClient) ValidatorPerformance(ctx context.Context, in *ethpb.ValidatorPerformanceRequest) (*ethpb.ValidatorPerformanceResponse, error) {
	// Check the node version first, since validator performance is a Prysm-specific endpoint.
	nodeVersion, err := c.nodeClient.Version(ctx, nil)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get node version")
	}

	if !strings.Contains(strings.ToLower(nodeVersion.Version), "prysm") {
		return nil, client.ErrNotSupported
	}

	request, err := json.Marshal(structs.GetValidatorPerformanceRequest{
		PublicKeys: in.PublicKeys,
		Indices: in.Indices,
	})
	if err != nil {
		return nil, errors.Wrap(err, "failed to marshal request")
	}
	resp := &structs.GetValidatorPerformanceResponse{}
	if err = c.jsonRestHandler.Post(ctx, "/prysm/validators/performance", nil, bytes.NewBuffer(request), resp); err != nil {
		return nil, err
	}

	return &ethpb.ValidatorPerformanceResponse{
		CurrentEffectiveBalances: resp.CurrentEffectiveBalances,
		CorrectlyVotedSource: resp.CorrectlyVotedSource,
		CorrectlyVotedTarget: resp.CorrectlyVotedTarget,
		CorrectlyVotedHead: resp.CorrectlyVotedHead,
		BalancesBeforeEpochTransition: resp.BalancesBeforeEpochTransition,
		BalancesAfterEpochTransition: resp.BalancesAfterEpochTransition,
		MissingValidators: resp.MissingValidators,
		PublicKeys: resp.PublicKeys,
		InactivityScores: resp.InactivityScores,
	}, nil
}
@@ -1,221 +0,0 @@
|
||||
package prysm_api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/node"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/validator"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/bytesutil"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
"go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
func TestGetValidatorCount(t *testing.T) {
|
||||
const nodeVersion = "prysm/v0.0.1"
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
versionEndpointError error
|
||||
validatorCountEndpointError error
|
||||
versionResponse structs.GetVersionResponse
|
||||
validatorCountResponse structs.GetValidatorCountResponse
|
||||
validatorCountCalled int
|
||||
expectedResponse []beacon.ValidatorCount
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "success",
|
||||
versionResponse: structs.GetVersionResponse{
|
||||
Data: &structs.Version{Version: nodeVersion},
|
||||
},
|
||||
validatorCountResponse: structs.GetValidatorCountResponse{
|
||||
ExecutionOptimistic: "false",
|
||||
Finalized: "true",
|
||||
Data: []*structs.ValidatorCount{
|
||||
{
|
||||
Status: "active",
|
||||
Count: "10",
|
||||
},
|
||||
},
|
||||
},
|
||||
validatorCountCalled: 1,
|
||||
expectedResponse: []beacon.ValidatorCount{
|
||||
{
|
||||
Status: "active",
|
||||
Count: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "not supported beacon node",
|
||||
versionResponse: structs.GetVersionResponse{
|
||||
Data: &structs.Version{Version: "lighthouse/v0.0.1"},
|
||||
},
|
||||
expectedError: "endpoint not supported",
|
||||
},
|
||||
{
|
||||
name: "fails to get version",
|
||||
versionEndpointError: errors.New("foo error"),
|
||||
expectedError: "failed to get node version",
|
||||
},
|
||||
{
|
||||
name: "fails to get validator count",
|
||||
versionResponse: structs.GetVersionResponse{
|
||||
Data: &structs.Version{Version: nodeVersion},
|
||||
},
|
||||
validatorCountEndpointError: errors.New("foo error"),
|
||||
validatorCountCalled: 1,
|
||||
expectedError: "foo error",
|
||||
},
|
||||
{
|
||||
name: "nil validator count data",
|
||||
versionResponse: structs.GetVersionResponse{
|
||||
Data: &structs.Version{Version: nodeVersion},
|
||||
},
|
||||
validatorCountResponse: structs.GetValidatorCountResponse{
|
||||
ExecutionOptimistic: "false",
|
||||
Finalized: "true",
|
||||
Data: nil,
|
||||
},
|
||||
validatorCountCalled: 1,
|
||||
expectedError: "validator count data is nil",
|
||||
},
|
||||
{
|
||||
name: "invalid validator count",
|
||||
versionResponse: structs.GetVersionResponse{
|
||||
Data: &structs.Version{Version: nodeVersion},
|
||||
},
|
||||
validatorCountResponse: structs.GetValidatorCountResponse{
|
||||
ExecutionOptimistic: "false",
|
||||
Finalized: "true",
|
||||
Data: []*structs.ValidatorCount{
|
||||
{
|
||||
Status: "active",
|
||||
Count: "10",
|
||||
},
|
||||
{
|
||||
Status: "exited",
|
||||
Count: "10",
|
||||
},
|
||||
},
|
||||
},
|
||||
validatorCountCalled: 1,
|
||||
expectedError: "mismatch between validator count data and the number of statuses provided",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
// Expect node version endpoint call.
|
||||
var nodeVersionResponse structs.GetVersionResponse
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
"/eth/v1/node/version",
|
||||
&nodeVersionResponse,
|
||||
).Return(
|
||||
test.versionEndpointError,
|
||||
).SetArg(
|
||||
2,
|
||||
test.versionResponse,
|
||||
)
|
||||
|
||||
var validatorCountResponse structs.GetValidatorCountResponse
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/states/head/validator_count?status=active",
|
||||
&validatorCountResponse,
|
||||
).Return(
|
||||
test.validatorCountEndpointError,
|
||||
).SetArg(
|
||||
2,
|
||||
test.validatorCountResponse,
|
||||
).Times(test.validatorCountCalled)
|
||||
|
||||
// Type assertion.
|
||||
var client Client = &prysmChainClient{
|
||||
nodeClient: node.NewNodeClientWithFallback(jsonRestHandler, nil),
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
|
||||
countResponse, err := client.ValidatorCount(ctx, "head", []validator.Status{validator.Active})
|
||||
|
||||
if len(test.expectedResponse) == 0 {
|
||||
require.ErrorContains(t, test.expectedError, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
require.DeepEqual(t, test.expectedResponse, countResponse)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPrysmChainClient_GetValidatorPerformance(t *testing.T) {
|
||||
const nodeVersion = "prysm/v0.0.1"
|
||||
publicKeys := [][48]byte{
|
||||
bytesutil.ToBytes48([]byte{1}),
|
||||
bytesutil.ToBytes48([]byte{2}),
|
||||
bytesutil.ToBytes48([]byte{3}),
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
request, err := json.Marshal(structs.GetValidatorPerformanceRequest{
|
||||
PublicKeys: [][]byte{publicKeys[0][:], publicKeys[2][:], publicKeys[1][:]},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
// Expect node version endpoint call.
|
||||
var nodeVersionResponse structs.GetVersionResponse
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
"/eth/v1/node/version",
|
||||
&nodeVersionResponse,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetVersionResponse{
|
||||
Data: &structs.Version{Version: nodeVersion},
|
||||
},
|
||||
)
|
||||
|
||||
wantResponse := &structs.GetValidatorPerformanceResponse{}
|
||||
want := &ethpb.ValidatorPerformanceResponse{}
|
||||
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
"/prysm/validators/performance",
|
||||
nil,
|
||||
bytes.NewBuffer(request),
|
||||
wantResponse,
|
||||
).Return(
|
||||
nil,
|
||||
)
|
||||
|
||||
var client Client = &prysmChainClient{
|
||||
nodeClient: node.NewNodeClientWithFallback(jsonRestHandler, nil),
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
|
||||
got, err := client.ValidatorPerformance(ctx, &ethpb.ValidatorPerformanceRequest{
|
||||
PublicKeys: [][]byte{publicKeys[0][:], publicKeys[2][:], publicKeys[1][:]},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.DeepEqual(t, want.PublicKeys, got.PublicKeys)
|
||||
}
|
||||
@@ -1,42 +0,0 @@
load("@prysm//tools/go:def.bzl", "go_library", "go_test")

go_library(
    name = "go_default_library",
    srcs = [
        "duties.go",
        "genesis.go",
        "interfaces.go",
        "providers.go",
        "state_validators.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon/shared_providers",
    visibility = ["//visibility:public"],
    deps = [
        "//api/client:go_default_library",
        "//api/client/apiutil:go_default_library",
        "//api/server/structs:go_default_library",
        "//consensus-types/primitives:go_default_library",
        "@com_github_pkg_errors//:go_default_library",
    ],
)

go_test(
    name = "go_default_test",
    srcs = [
        "duties_test.go",
        "genesis_test.go",
        "state_validators_test.go",
    ],
    embed = [":go_default_library"],
    deps = [
        "//api/client/apiutil:go_default_library",
        "//api/client/beacon/mock:go_default_library",
        "//api/server/structs:go_default_library",
        "//consensus-types/primitives:go_default_library",
        "//testing/assert:go_default_library",
        "//testing/require:go_default_library",
        "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
        "@com_github_pkg_errors//:go_default_library",
        "@org_uber_go_mock//gomock:go_default_library",
    ],
)
@@ -1,132 +0,0 @@
package shared_providers

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/url"
	"strconv"

	"github.com/pkg/errors"
	"github.com/prysmaticlabs/prysm/v5/api/apiutil"
	"github.com/prysmaticlabs/prysm/v5/api/client"
	"github.com/prysmaticlabs/prysm/v5/api/server/structs"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
)

type dutiesProvider struct {
	jsonRestHandler client.JsonRestHandler
}

// Committees retrieves the committees for the given epoch
func (c dutiesProvider) Committees(ctx context.Context, epoch primitives.Epoch) ([]*structs.Committee, error) {
	committeeParams := url.Values{}
	committeeParams.Add("epoch", strconv.FormatUint(uint64(epoch), 10))
	committeesRequest := apiutil.BuildURL("/eth/v1/beacon/states/head/committees", committeeParams)

	var stateCommittees structs.GetCommitteesResponse
	if err := c.jsonRestHandler.Get(ctx, committeesRequest, &stateCommittees); err != nil {
		return nil, err
	}

	if stateCommittees.Data == nil {
		return nil, errors.New("state committees data is nil")
	}

	for index, committee := range stateCommittees.Data {
		if committee == nil {
			return nil, errors.Errorf("committee at index `%d` is nil", index)
		}
	}

	return stateCommittees.Data, nil
}

// AttesterDuties retrieves the attester duties for the given epoch and validatorIndices
func (c dutiesProvider) AttesterDuties(ctx context.Context, epoch primitives.Epoch, validatorIndices []primitives.ValidatorIndex) ([]*structs.AttesterDuty, error) {
	jsonValidatorIndices := make([]string, len(validatorIndices))
	for index, validatorIndex := range validatorIndices {
		jsonValidatorIndices[index] = strconv.FormatUint(uint64(validatorIndex), 10)
	}

	validatorIndicesBytes, err := json.Marshal(jsonValidatorIndices)
	if err != nil {
		return nil, errors.Wrap(err, "failed to marshal validator indices")
	}

	attesterDuties := &structs.GetAttesterDutiesResponse{}
	if err = c.jsonRestHandler.Post(
		ctx,
		fmt.Sprintf("/eth/v1/validator/duties/attester/%d", epoch),
		nil,
		bytes.NewBuffer(validatorIndicesBytes),
		attesterDuties,
	); err != nil {
		return nil, err
	}

	for index, attesterDuty := range attesterDuties.Data {
		if attesterDuty == nil {
			return nil, errors.Errorf("attester duty at index `%d` is nil", index)
		}
	}

	return attesterDuties.Data, nil
}

// ProposerDuties retrieves the proposer duties for the given epoch
func (c dutiesProvider) ProposerDuties(ctx context.Context, epoch primitives.Epoch) ([]*structs.ProposerDuty, error) {
	proposerDuties := structs.GetProposerDutiesResponse{}
	if err := c.jsonRestHandler.Get(ctx, fmt.Sprintf("/eth/v1/validator/duties/proposer/%d", epoch), &proposerDuties); err != nil {
		return nil, err
	}

	if proposerDuties.Data == nil {
		return nil, errors.New("proposer duties data is nil")
	}

	for index, proposerDuty := range proposerDuties.Data {
		if proposerDuty == nil {
			return nil, errors.Errorf("proposer duty at index `%d` is nil", index)
		}
	}

	return proposerDuties.Data, nil
}

// SyncDuties retrieves the sync committee duties for the given epoch and validatorIndices
func (c dutiesProvider) SyncDuties(ctx context.Context, epoch primitives.Epoch, validatorIndices []primitives.ValidatorIndex) ([]*structs.SyncCommitteeDuty, error) {
	jsonValidatorIndices := make([]string, len(validatorIndices))
	for index, validatorIndex := range validatorIndices {
		jsonValidatorIndices[index] = strconv.FormatUint(uint64(validatorIndex), 10)
	}

	validatorIndicesBytes, err := json.Marshal(jsonValidatorIndices)
	if err != nil {
		return nil, errors.Wrap(err, "failed to marshal validator indices")
	}

	syncDuties := structs.GetSyncCommitteeDutiesResponse{}
	if err = c.jsonRestHandler.Post(
		ctx,
		fmt.Sprintf("/eth/v1/validator/duties/sync/%d", epoch),
		nil,
		bytes.NewBuffer(validatorIndicesBytes),
		&syncDuties,
	); err != nil {
		return nil, err
	}

	if syncDuties.Data == nil {
		return nil, errors.New("sync duties data is nil")
	}

	for index, syncDuty := range syncDuties.Data {
		if syncDuty == nil {
			return nil, errors.Errorf("sync duty at index `%d` is nil", index)
		}
	}

	return syncDuties.Data, nil
}
@@ -1,508 +0,0 @@
|
||||
package shared_providers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
"go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
const getAttesterDutiesTestEndpoint = "/eth/v1/validator/duties/attester"
|
||||
const getProposerDutiesTestEndpoint = "/eth/v1/validator/duties/proposer"
|
||||
const getSyncDutiesTestEndpoint = "/eth/v1/validator/duties/sync"
|
||||
const getCommitteesTestEndpoint = "/eth/v1/beacon/states/head/committees"
|
||||
|
||||
func TestGetAttesterDuties_Valid(t *testing.T) {
|
||||
stringValidatorIndices := []string{"2", "9"}
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
validatorIndicesBytes, err := json.Marshal(stringValidatorIndices)
|
||||
require.NoError(t, err)
|
||||
|
||||
expectedAttesterDuties := structs.GetAttesterDutiesResponse{
|
||||
Data: []*structs.AttesterDuty{
|
||||
{
|
||||
Pubkey: hexutil.Encode([]byte{1}),
|
||||
ValidatorIndex: "2",
|
||||
CommitteeIndex: "3",
|
||||
CommitteeLength: "4",
|
||||
CommitteesAtSlot: "5",
|
||||
ValidatorCommitteeIndex: "6",
|
||||
Slot: "7",
|
||||
},
|
||||
{
|
||||
Pubkey: hexutil.Encode([]byte{8}),
|
||||
ValidatorIndex: "9",
|
||||
CommitteeIndex: "10",
|
||||
CommitteeLength: "11",
|
||||
CommitteesAtSlot: "12",
|
||||
ValidatorCommitteeIndex: "13",
|
||||
Slot: "14",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
validatorIndices := []primitives.ValidatorIndex{2, 9}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getAttesterDutiesTestEndpoint, epoch),
|
||||
nil,
|
||||
bytes.NewBuffer(validatorIndicesBytes),
|
||||
&structs.GetAttesterDutiesResponse{},
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
4,
|
||||
expectedAttesterDuties,
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
attesterDuties, err := dutiesProvider.AttesterDuties(ctx, epoch, validatorIndices)
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, expectedAttesterDuties.Data, attesterDuties)
|
||||
}
|
||||
|
||||
func TestGetAttesterDuties_HttpError(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getAttesterDutiesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
errors.New("foo error"),
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.AttesterDuties(ctx, epoch, nil)
|
||||
assert.ErrorContains(t, "foo error", err)
|
||||
}
|
||||
|
||||
func TestGetAttesterDuties_NilAttesterDuty(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getAttesterDutiesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
4,
|
||||
structs.GetAttesterDutiesResponse{
|
||||
Data: []*structs.AttesterDuty{nil},
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.AttesterDuties(ctx, epoch, nil)
|
||||
assert.ErrorContains(t, "attester duty at index `0` is nil", err)
|
||||
}
|
||||
|
||||
func TestGetProposerDuties_Valid(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
expectedProposerDuties := structs.GetProposerDutiesResponse{
|
||||
Data: []*structs.ProposerDuty{
|
||||
{
|
||||
Pubkey: hexutil.Encode([]byte{1}),
|
||||
ValidatorIndex: "2",
|
||||
Slot: "3",
|
||||
},
|
||||
{
|
||||
Pubkey: hexutil.Encode([]byte{4}),
|
||||
ValidatorIndex: "5",
|
||||
Slot: "6",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getProposerDutiesTestEndpoint, epoch),
|
||||
&structs.GetProposerDutiesResponse{},
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
expectedProposerDuties,
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
proposerDuties, err := dutiesProvider.ProposerDuties(ctx, epoch)
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, expectedProposerDuties.Data, proposerDuties)
|
||||
}
|
||||
|
||||
func TestGetProposerDuties_HttpError(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getProposerDutiesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
errors.New("foo error"),
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.ProposerDuties(ctx, epoch)
|
||||
assert.ErrorContains(t, "foo error", err)
|
||||
}
|
||||
|
||||
func TestGetProposerDuties_NilData(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getProposerDutiesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetProposerDutiesResponse{
|
||||
Data: nil,
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.ProposerDuties(ctx, epoch)
|
||||
assert.ErrorContains(t, "proposer duties data is nil", err)
|
||||
}
|
||||
|
||||
func TestGetProposerDuties_NilProposerDuty(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getProposerDutiesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetProposerDutiesResponse{
|
||||
Data: []*structs.ProposerDuty{nil},
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.ProposerDuties(ctx, epoch)
|
||||
assert.ErrorContains(t, "proposer duty at index `0` is nil", err)
|
||||
}
|
||||
|
||||
func TestGetSyncDuties_Valid(t *testing.T) {
|
||||
stringValidatorIndices := []string{"2", "6"}
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
validatorIndicesBytes, err := json.Marshal(stringValidatorIndices)
|
||||
require.NoError(t, err)
|
||||
|
||||
expectedSyncDuties := structs.GetSyncCommitteeDutiesResponse{
|
||||
Data: []*structs.SyncCommitteeDuty{
|
||||
{
|
||||
Pubkey: hexutil.Encode([]byte{1}),
|
||||
ValidatorIndex: "2",
|
||||
ValidatorSyncCommitteeIndices: []string{
|
||||
"3",
|
||||
"4",
|
||||
},
|
||||
},
|
||||
{
|
||||
Pubkey: hexutil.Encode([]byte{5}),
|
||||
ValidatorIndex: "6",
|
||||
ValidatorSyncCommitteeIndices: []string{
|
||||
"7",
|
||||
"8",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
validatorIndices := []primitives.ValidatorIndex{2, 6}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getSyncDutiesTestEndpoint, epoch),
|
||||
nil,
|
||||
bytes.NewBuffer(validatorIndicesBytes),
|
||||
&structs.GetSyncCommitteeDutiesResponse{},
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
4,
|
||||
expectedSyncDuties,
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
syncDuties, err := dutiesProvider.SyncDuties(ctx, epoch, validatorIndices)
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, expectedSyncDuties.Data, syncDuties)
|
||||
}
|
||||
|
||||
func TestGetSyncDuties_HttpError(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getSyncDutiesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
errors.New("foo error"),
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.SyncDuties(ctx, epoch, nil)
|
||||
assert.ErrorContains(t, "foo error", err)
|
||||
}
|
||||
|
||||
func TestGetSyncDuties_NilData(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getSyncDutiesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
4,
|
||||
structs.GetSyncCommitteeDutiesResponse{
|
||||
Data: nil,
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.SyncDuties(ctx, epoch, nil)
|
||||
assert.ErrorContains(t, "sync duties data is nil", err)
|
||||
}
|
||||
|
||||
func TestGetSyncDuties_NilSyncDuty(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s/%d", getSyncDutiesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
4,
|
||||
structs.GetSyncCommitteeDutiesResponse{
|
||||
Data: []*structs.SyncCommitteeDuty{nil},
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.SyncDuties(ctx, epoch, nil)
|
||||
assert.ErrorContains(t, "sync duty at index `0` is nil", err)
|
||||
}
|
||||
|
||||
func TestGetCommittees_Valid(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
expectedCommittees := structs.GetCommitteesResponse{
|
||||
Data: []*structs.Committee{
|
||||
{
|
||||
Index: "1",
|
||||
Slot: "2",
|
||||
Validators: []string{
|
||||
"3",
|
||||
"4",
|
||||
},
|
||||
},
|
||||
{
|
||||
Index: "5",
|
||||
Slot: "6",
|
||||
Validators: []string{
|
||||
"7",
|
||||
"8",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s?epoch=%d", getCommitteesTestEndpoint, epoch),
|
||||
&structs.GetCommitteesResponse{},
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
expectedCommittees,
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
committees, err := dutiesProvider.Committees(ctx, epoch)
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, expectedCommittees.Data, committees)
|
||||
}
|
||||
|
||||
func TestGetCommittees_HttpError(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s?epoch=%d", getCommitteesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
errors.New("foo error"),
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.Committees(ctx, epoch)
|
||||
assert.ErrorContains(t, "foo error", err)
|
||||
}
|
||||
|
||||
func TestGetCommittees_NilData(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s?epoch=%d", getCommitteesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetCommitteesResponse{
|
||||
Data: nil,
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.Committees(ctx, epoch)
|
||||
assert.ErrorContains(t, "state committees data is nil", err)
|
||||
}
|
||||
|
||||
func TestGetCommittees_NilCommittee(t *testing.T) {
|
||||
const epoch = primitives.Epoch(1)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("%s?epoch=%d", getCommitteesTestEndpoint, epoch),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetCommitteesResponse{
|
||||
Data: []*structs.Committee{nil},
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
dutiesProvider := &dutiesProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := dutiesProvider.Committees(ctx, epoch)
|
||||
assert.ErrorContains(t, "committee at index `0` is nil", err)
|
||||
}
|
||||
@@ -1,38 +0,0 @@
package shared_providers

import (
	"context"
	"sync"

	"github.com/pkg/errors"
	"github.com/prysmaticlabs/prysm/v5/api/client"
	"github.com/prysmaticlabs/prysm/v5/api/server/structs"
)

type genesisProvider struct {
	jsonRestHandler client.JsonRestHandler
	genesis *structs.Genesis
	once sync.Once
}

// Genesis gets the genesis information from the beacon node via the /eth/v1/beacon/genesis endpoint.
func (c *genesisProvider) Genesis(ctx context.Context) (*structs.Genesis, error) {
	genesisJson := &structs.GetGenesisResponse{}
	var doErr error
	c.once.Do(func() {
		if err := c.jsonRestHandler.Get(ctx, "/eth/v1/beacon/genesis", genesisJson); err != nil {
			doErr = err
			return
		}
		if genesisJson.Data == nil {
			doErr = errors.New("genesis data is nil")
			return
		}
		c.genesis = genesisJson.Data
	})
	if doErr != nil {
		// Allow another call because the current one returned an error.
		c.once = sync.Once{}
	}
	return c.genesis, doErr
}
@@ -1,144 +0,0 @@
|
||||
package shared_providers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
"go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
func TestGetGenesis_ValidGenesis(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
genesisResponseJson := structs.GetGenesisResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/genesis",
|
||||
&genesisResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetGenesisResponse{
|
||||
Data: &structs.Genesis{
|
||||
GenesisTime: "1234",
|
||||
GenesisValidatorsRoot: "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
},
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
genesisProvider := &genesisProvider{jsonRestHandler: jsonRestHandler}
|
||||
resp, err := genesisProvider.Genesis(ctx)
|
||||
assert.NoError(t, err)
|
||||
require.NotNil(t, resp)
|
||||
assert.Equal(t, "1234", resp.GenesisTime)
|
||||
assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", resp.GenesisValidatorsRoot)
|
||||
}
|
||||
|
||||
func TestGetGenesis_NilData(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
genesisResponseJson := structs.GetGenesisResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/genesis",
|
||||
&genesisResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetGenesisResponse{Data: nil},
|
||||
).Times(1)
|
||||
|
||||
genesisProvider := &genesisProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := genesisProvider.Genesis(ctx)
|
||||
assert.ErrorContains(t, "genesis data is nil", err)
|
||||
}
|
||||
|
||||
func TestGetGenesis_EndpointCalledOnlyOnce(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
genesisResponseJson := structs.GetGenesisResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/genesis",
|
||||
&genesisResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetGenesisResponse{
|
||||
Data: &structs.Genesis{
|
||||
GenesisTime: "1234",
|
||||
GenesisValidatorsRoot: "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
},
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
genesisProvider := &genesisProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := genesisProvider.Genesis(ctx)
|
||||
assert.NoError(t, err)
|
||||
resp, err := genesisProvider.Genesis(ctx)
|
||||
assert.NoError(t, err)
|
||||
require.NotNil(t, resp)
|
||||
assert.Equal(t, "1234", resp.GenesisTime)
|
||||
assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", resp.GenesisValidatorsRoot)
|
||||
}
|
||||
|
||||
func TestGetGenesis_EndpointCanBeCalledAgainAfterError(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
genesisResponseJson := structs.GetGenesisResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/genesis",
|
||||
&genesisResponseJson,
|
||||
).Return(
|
||||
errors.New("foo"),
|
||||
).Times(1)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/genesis",
|
||||
&genesisResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetGenesisResponse{
|
||||
Data: &structs.Genesis{
|
||||
GenesisTime: "1234",
|
||||
GenesisValidatorsRoot: "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
},
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
genesisProvider := &genesisProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err := genesisProvider.Genesis(ctx)
|
||||
require.ErrorContains(t, "foo", err)
|
||||
resp, err := genesisProvider.Genesis(ctx)
|
||||
assert.NoError(t, err)
|
||||
require.NotNil(t, resp)
|
||||
assert.Equal(t, "1234", resp.GenesisTime)
|
||||
assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", resp.GenesisValidatorsRoot)
|
||||
}
|
||||
@@ -1,25 +0,0 @@
package shared_providers

import (
	"context"

	"github.com/prysmaticlabs/prysm/v5/api/server/structs"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
)

type Genesis interface {
	Genesis(ctx context.Context) (*structs.Genesis, error)
}

type StateValidators interface {
	StateValidators(context.Context, []string, []primitives.ValidatorIndex, []string) (*structs.GetValidatorsResponse, error)
	StateValidatorsForSlot(context.Context, primitives.Slot, []string, []primitives.ValidatorIndex, []string) (*structs.GetValidatorsResponse, error)
	StateValidatorsForHead(context.Context, []string, []primitives.ValidatorIndex, []string) (*structs.GetValidatorsResponse, error)
}

type Duties interface {
	AttesterDuties(ctx context.Context, epoch primitives.Epoch, validatorIndices []primitives.ValidatorIndex) ([]*structs.AttesterDuty, error)
	ProposerDuties(ctx context.Context, epoch primitives.Epoch) ([]*structs.ProposerDuty, error)
	SyncDuties(ctx context.Context, epoch primitives.Epoch, validatorIndices []primitives.ValidatorIndex) ([]*structs.SyncCommitteeDuty, error)
	Committees(ctx context.Context, epoch primitives.Epoch) ([]*structs.Committee, error)
}
@@ -1,17 +0,0 @@
package shared_providers

import (
	"github.com/prysmaticlabs/prysm/v5/api/client"
)

func NewStateValidators(jsonRestHandler client.JsonRestHandler) StateValidators {
	return &stateValidatorsProvider{jsonRestHandler: jsonRestHandler}
}

func NewDuties(jsonRestHandler client.JsonRestHandler) Duties {
	return &dutiesProvider{jsonRestHandler: jsonRestHandler}
}

func NewGenesis(jsonRestHandler client.JsonRestHandler) Genesis {
	return &genesisProvider{jsonRestHandler: jsonRestHandler}
}
@@ -1,123 +0,0 @@
package shared_providers

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/url"
	"strconv"

	"github.com/pkg/errors"
	"github.com/prysmaticlabs/prysm/v5/api/apiutil"
	"github.com/prysmaticlabs/prysm/v5/api/client"
	"github.com/prysmaticlabs/prysm/v5/api/server/structs"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
)

type stateValidatorsProvider struct {
	jsonRestHandler client.JsonRestHandler
}

func (c stateValidatorsProvider) StateValidators(
	ctx context.Context,
	stringPubkeys []string,
	indexes []primitives.ValidatorIndex,
	statuses []string,
) (*structs.GetValidatorsResponse, error) {
	stringIndices := convertValidatorIndicesToStrings(indexes)
	return c.getStateValidatorsHelper(ctx, "/eth/v1/beacon/states/head/validators", append(stringIndices, stringPubkeys...), statuses)
}

func (c stateValidatorsProvider) StateValidatorsForSlot(
	ctx context.Context,
	slot primitives.Slot,
	stringPubkeys []string,
	indices []primitives.ValidatorIndex,
	statuses []string,
) (*structs.GetValidatorsResponse, error) {
	stringIndices := convertValidatorIndicesToStrings(indices)
	return c.getStateValidatorsHelper(ctx, fmt.Sprintf("/eth/v1/beacon/states/%d/validators", slot), append(stringIndices, stringPubkeys...), statuses)
}

func (c stateValidatorsProvider) StateValidatorsForHead(
	ctx context.Context,
	stringPubkeys []string,
	indices []primitives.ValidatorIndex,
	statuses []string,
) (*structs.GetValidatorsResponse, error) {
	stringIndices := convertValidatorIndicesToStrings(indices)
	return c.getStateValidatorsHelper(ctx, "/eth/v1/beacon/states/head/validators", append(stringIndices, stringPubkeys...), statuses)
}

func convertValidatorIndicesToStrings(indices []primitives.ValidatorIndex) []string {
	var result []string
	indicesSet := make(map[primitives.ValidatorIndex]struct{}, len(indices))
	for _, index := range indices {
		if _, ok := indicesSet[index]; !ok {
			indicesSet[index] = struct{}{}
			result = append(result, strconv.FormatUint(uint64(index), 10))
		}
	}
	return result
}

func (c stateValidatorsProvider) getStateValidatorsHelper(
	ctx context.Context,
	endpoint string,
	vals []string,
	statuses []string,
) (*structs.GetValidatorsResponse, error) {
	req := structs.GetValidatorsRequest{
		Ids: []string{},
		Statuses: []string{},
	}
	req.Statuses = append(req.Statuses, statuses...)

	valSet := make(map[string]struct{}, len(vals))
	for _, v := range vals {
		if _, ok := valSet[v]; !ok {
			valSet[v] = struct{}{}
			req.Ids = append(req.Ids, v)
		}
	}

	reqBytes, err := json.Marshal(req)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to marshal request into JSON")
	}
	stateValidatorsJson := &structs.GetValidatorsResponse{}
	// First try the POST endpoint to check whether it is supported by the beacon node.
	if err = c.jsonRestHandler.Post(ctx, endpoint, nil, bytes.NewBuffer(reqBytes), stateValidatorsJson); err == nil {
		if stateValidatorsJson.Data == nil {
			return nil, errors.New("stateValidatorsJson.Data is nil")
		}

		return stateValidatorsJson, nil
	}

	// Re-initialise the response just in case.
	stateValidatorsJson = &structs.GetValidatorsResponse{}

	// POST is not supported by this beacon node, so fall back to the GET endpoint.
	queryParams := url.Values{}
	for _, id := range req.Ids {
		queryParams.Add("id", id)
	}
	for _, st := range req.Statuses {
		queryParams.Add("status", st)
	}

	query := apiutil.BuildURL(endpoint, queryParams)

	err = c.jsonRestHandler.Get(ctx, query, stateValidatorsJson)
	if err != nil {
		return nil, err
	}

	if stateValidatorsJson.Data == nil {
		return nil, errors.New("stateValidatorsJson.Data is nil")
	}

	return stateValidatorsJson, nil
}
@@ -1,365 +0,0 @@
|
||||
package shared_providers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/url"
|
||||
"testing"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/apiutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
"go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
func TestGetStateValidators_Nominal_POST(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
req := &structs.GetValidatorsRequest{
|
||||
Ids: []string{
|
||||
"12345",
|
||||
"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13",
|
||||
"0x80000e851c0f53c3246ff726d7ff7766661ca5e12a07c45c114d208d54f0f8233d4380b2e9aff759d69795d1df905526",
|
||||
"0x424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242",
|
||||
"0x800015473bdc3a7f45ef8eb8abc598bc20021e55ad6e6ad1d745aaef9730dd2c28ec08bf42df18451de94dd4a6d24ec5",
|
||||
},
|
||||
Statuses: []string{"active_ongoing", "active_exiting", "exited_slashed", "exited_unslashed"},
|
||||
}
|
||||
reqBytes, err := json.Marshal(req)
|
||||
require.NoError(t, err)
|
||||
|
||||
stateValidatorsResponseJson := structs.GetValidatorsResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
wanted := []*structs.ValidatorContainer{
|
||||
{
|
||||
Index: "12345",
|
||||
Status: "active_ongoing",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: "0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be19",
|
||||
},
|
||||
},
|
||||
{
|
||||
Index: "55293",
|
||||
Status: "active_ongoing",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: "0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13",
|
||||
},
|
||||
},
|
||||
{
|
||||
Index: "55294",
|
||||
Status: "active_exiting",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: "0x80000e851c0f53c3246ff726d7ff7766661ca5e12a07c45c114d208d54f0f8233d4380b2e9aff759d69795d1df905526",
|
||||
},
|
||||
},
|
||||
{
|
||||
Index: "55295",
|
||||
Status: "exited_slashed",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: "0x800015473bdc3a7f45ef8eb8abc598bc20021e55ad6e6ad1d745aaef9730dd2c28ec08bf42df18451de94dd4a6d24ec5",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/states/head/validators",
|
||||
nil,
|
||||
bytes.NewBuffer(reqBytes),
|
||||
&stateValidatorsResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
4,
|
||||
structs.GetValidatorsResponse{
|
||||
Data: wanted,
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
stateValidatorsProvider := stateValidatorsProvider{jsonRestHandler: jsonRestHandler}
|
||||
actual, err := stateValidatorsProvider.StateValidators(ctx, []string{
|
||||
"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13", // active_ongoing
|
||||
"0x80000e851c0f53c3246ff726d7ff7766661ca5e12a07c45c114d208d54f0f8233d4380b2e9aff759d69795d1df905526", // active_exiting
|
||||
"0x424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242", // does not exist
|
||||
"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13", // active_ongoing - duplicate
|
||||
"0x800015473bdc3a7f45ef8eb8abc598bc20021e55ad6e6ad1d745aaef9730dd2c28ec08bf42df18451de94dd4a6d24ec5", // exited_slashed
|
||||
},
|
||||
[]primitives.ValidatorIndex{
|
||||
12345, // active_ongoing
|
||||
12345, // active_ongoing - duplicate
|
||||
},
|
||||
[]string{"active_ongoing", "active_exiting", "exited_slashed", "exited_unslashed"},
|
||||
)
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, wanted, actual.Data)
|
||||
}
|
||||
|
||||
func TestGetStateValidators_Nominal_GET(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
req := &structs.GetValidatorsRequest{
|
||||
Ids: []string{
|
||||
"12345",
|
||||
"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13",
|
||||
"0x80000e851c0f53c3246ff726d7ff7766661ca5e12a07c45c114d208d54f0f8233d4380b2e9aff759d69795d1df905526",
|
||||
"0x424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242",
|
||||
"0x800015473bdc3a7f45ef8eb8abc598bc20021e55ad6e6ad1d745aaef9730dd2c28ec08bf42df18451de94dd4a6d24ec5",
|
||||
},
|
||||
Statuses: []string{"active_ongoing", "active_exiting", "exited_slashed", "exited_unslashed"},
|
||||
}
|
||||
reqBytes, err := json.Marshal(req)
|
||||
require.NoError(t, err)
|
||||
|
||||
stateValidatorsResponseJson := structs.GetValidatorsResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
wanted := []*structs.ValidatorContainer{
|
||||
{
|
||||
Index: "12345",
|
||||
Status: "active_ongoing",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: "0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be19",
|
||||
},
|
||||
},
|
||||
{
|
||||
Index: "55293",
|
||||
Status: "active_ongoing",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: "0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13",
|
||||
},
|
||||
},
|
||||
{
|
||||
Index: "55294",
|
||||
Status: "active_exiting",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: "0x80000e851c0f53c3246ff726d7ff7766661ca5e12a07c45c114d208d54f0f8233d4380b2e9aff759d69795d1df905526",
|
||||
},
|
||||
},
|
||||
{
|
||||
Index: "55295",
|
||||
Status: "exited_slashed",
|
||||
Validator: &structs.Validator{
|
||||
Pubkey: "0x800015473bdc3a7f45ef8eb8abc598bc20021e55ad6e6ad1d745aaef9730dd2c28ec08bf42df18451de94dd4a6d24ec5",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// First return an error from POST call.
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/states/head/validators",
|
||||
nil,
|
||||
bytes.NewBuffer(reqBytes),
|
||||
&stateValidatorsResponseJson,
|
||||
).Return(
|
||||
errors.New("an error"),
|
||||
).Times(1)
|
||||
|
||||
// Then try the GET call which will be successful.
|
||||
queryParams := url.Values{}
|
||||
for _, id := range req.Ids {
|
||||
queryParams.Add("id", id)
|
||||
}
|
||||
for _, st := range req.Statuses {
|
||||
queryParams.Add("status", st)
|
||||
}
|
||||
|
||||
query := apiutil.BuildURL("/eth/v1/beacon/states/head/validators", queryParams)
|
||||
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
query,
|
||||
&stateValidatorsResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetValidatorsResponse{
|
||||
Data: wanted,
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
stateValidatorsProvider := stateValidatorsProvider{jsonRestHandler: jsonRestHandler}
|
||||
actual, err := stateValidatorsProvider.StateValidators(ctx, []string{
|
||||
"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13", // active_ongoing
|
||||
"0x80000e851c0f53c3246ff726d7ff7766661ca5e12a07c45c114d208d54f0f8233d4380b2e9aff759d69795d1df905526", // active_exiting
|
||||
"0x424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242", // does not exist
|
||||
"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13", // active_ongoing - duplicate
|
||||
"0x800015473bdc3a7f45ef8eb8abc598bc20021e55ad6e6ad1d745aaef9730dd2c28ec08bf42df18451de94dd4a6d24ec5", // exited_slashed
|
||||
},
|
||||
[]primitives.ValidatorIndex{
|
||||
12345, // active_ongoing
|
||||
12345, // active_ongoing - duplicate
|
||||
},
|
||||
[]string{"active_ongoing", "active_exiting", "exited_slashed", "exited_unslashed"},
|
||||
)
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, wanted, actual.Data)
|
||||
}
|
||||
|
||||
func TestGetStateValidators_GetRestJsonResponseOnError(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
req := &structs.GetValidatorsRequest{
|
||||
Ids: []string{"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13"},
|
||||
Statuses: []string{},
|
||||
}
|
||||
reqBytes, err := json.Marshal(req)
|
||||
require.NoError(t, err)
|
||||
|
||||
stateValidatorsResponseJson := structs.GetValidatorsResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// First call POST.
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/states/head/validators",
|
||||
nil,
|
||||
bytes.NewBuffer(reqBytes),
|
||||
&stateValidatorsResponseJson,
|
||||
).Return(
|
||||
errors.New("an error"),
|
||||
).Times(1)
|
||||
|
||||
// Call to GET endpoint upon receiving error from POST call.
|
||||
queryParams := url.Values{}
|
||||
for _, id := range req.Ids {
|
||||
queryParams.Add("id", id)
|
||||
}
|
||||
for _, st := range req.Statuses {
|
||||
queryParams.Add("status", st)
|
||||
}
|
||||
|
||||
query := apiutil.BuildURL("/eth/v1/beacon/states/head/validators", queryParams)
|
||||
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
query,
|
||||
&stateValidatorsResponseJson,
|
||||
).Return(
|
||||
errors.New("an error"),
|
||||
).Times(1)
|
||||
|
||||
stateValidatorsProvider := stateValidatorsProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err = stateValidatorsProvider.StateValidators(ctx, []string{
|
||||
"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13", // active_ongoing
|
||||
},
|
||||
nil,
|
||||
nil,
|
||||
)
|
||||
assert.ErrorContains(t, "an error", err)
|
||||
}
|
||||
|
||||
func TestGetStateValidators_DataIsNil_POST(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
req := &structs.GetValidatorsRequest{
|
||||
Ids: []string{"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13"},
|
||||
Statuses: []string{},
|
||||
}
|
||||
reqBytes, err := json.Marshal(req)
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
stateValidatorsResponseJson := structs.GetValidatorsResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/states/head/validators",
|
||||
nil, bytes.NewBuffer(reqBytes),
|
||||
&stateValidatorsResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
4,
|
||||
structs.GetValidatorsResponse{
|
||||
Data: nil,
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
stateValidatorsProvider := stateValidatorsProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err = stateValidatorsProvider.StateValidators(ctx, []string{
|
||||
"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13", // active_ongoing
|
||||
},
|
||||
nil,
|
||||
nil,
|
||||
)
|
||||
assert.ErrorContains(t, "stateValidatorsJson.Data is nil", err)
|
||||
}
|
||||
|
||||
func TestGetStateValidators_DataIsNil_GET(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
req := &structs.GetValidatorsRequest{
|
||||
Ids: []string{"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13"},
|
||||
Statuses: []string{},
|
||||
}
|
||||
reqBytes, err := json.Marshal(req)
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
stateValidatorsResponseJson := structs.GetValidatorsResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
// First call POST which will return an error.
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
"/eth/v1/beacon/states/head/validators",
|
||||
nil,
|
||||
bytes.NewBuffer(reqBytes),
|
||||
&stateValidatorsResponseJson,
|
||||
).Return(
|
||||
errors.New("an error"),
|
||||
).Times(1)
|
||||
|
||||
// Then call GET which returns nil Data.
|
||||
queryParams := url.Values{}
|
||||
for _, id := range req.Ids {
|
||||
queryParams.Add("id", id)
|
||||
}
|
||||
for _, st := range req.Statuses {
|
||||
queryParams.Add("status", st)
|
||||
}
|
||||
|
||||
query := apiutil.BuildURL("/eth/v1/beacon/states/head/validators", queryParams)
|
||||
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
query,
|
||||
&stateValidatorsResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetValidatorsResponse{
|
||||
Data: nil,
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
stateValidatorsProvider := stateValidatorsProvider{jsonRestHandler: jsonRestHandler}
|
||||
_, err = stateValidatorsProvider.StateValidators(ctx, []string{
|
||||
"0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13", // active_ongoing
|
||||
},
|
||||
nil,
|
||||
nil,
|
||||
)
|
||||
assert.ErrorContains(t, "stateValidatorsJson.Data is nil", err)
|
||||
}
|
||||
@@ -1,145 +0,0 @@
package beacon

import (
	"encoding/json"
	"strconv"

	"github.com/ethereum/go-ethereum/common/hexutil"
	"github.com/pkg/errors"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

type BeaconCommitteeSelection struct {
	SelectionProof []byte
	Slot           primitives.Slot
	ValidatorIndex primitives.ValidatorIndex
}

type beaconCommitteeSelectionJson struct {
	SelectionProof string `json:"selection_proof"`
	Slot           string `json:"slot"`
	ValidatorIndex string `json:"validator_index"`
}

func (b *BeaconCommitteeSelection) MarshalJSON() ([]byte, error) {
	return json.Marshal(beaconCommitteeSelectionJson{
		SelectionProof: hexutil.Encode(b.SelectionProof),
		Slot:           strconv.FormatUint(uint64(b.Slot), 10),
		ValidatorIndex: strconv.FormatUint(uint64(b.ValidatorIndex), 10),
	})
}

func (b *BeaconCommitteeSelection) UnmarshalJSON(input []byte) error {
	var bjson beaconCommitteeSelectionJson
	err := json.Unmarshal(input, &bjson)
	if err != nil {
		return errors.Wrap(err, "failed to unmarshal beacon committee selection")
	}

	slot, err := strconv.ParseUint(bjson.Slot, 10, 64)
	if err != nil {
		return errors.Wrap(err, "failed to parse slot")
	}

	vIdx, err := strconv.ParseUint(bjson.ValidatorIndex, 10, 64)
	if err != nil {
		return errors.Wrap(err, "failed to parse validator index")
	}

	selectionProof, err := hexutil.Decode(bjson.SelectionProof)
	if err != nil {
		return errors.Wrap(err, "failed to parse selection proof")
	}

	b.Slot = primitives.Slot(slot)
	b.SelectionProof = selectionProof
	b.ValidatorIndex = primitives.ValidatorIndex(vIdx)

	return nil
}

type SyncCommitteeSelection struct {
	SelectionProof    []byte
	Slot              primitives.Slot
	SubcommitteeIndex primitives.CommitteeIndex
	ValidatorIndex    primitives.ValidatorIndex
}

type syncCommitteeSelectionJson struct {
	SelectionProof    string `json:"selection_proof"`
	Slot              string `json:"slot"`
	SubcommitteeIndex string `json:"subcommittee_index"`
	ValidatorIndex    string `json:"validator_index"`
}

func (s *SyncCommitteeSelection) MarshalJSON() ([]byte, error) {
	return json.Marshal(syncCommitteeSelectionJson{
		SelectionProof:    hexutil.Encode(s.SelectionProof),
		Slot:              strconv.FormatUint(uint64(s.Slot), 10),
		SubcommitteeIndex: strconv.FormatUint(uint64(s.SubcommitteeIndex), 10),
		ValidatorIndex:    strconv.FormatUint(uint64(s.ValidatorIndex), 10),
	})
}

func (s *SyncCommitteeSelection) UnmarshalJSON(input []byte) error {
	var resJson syncCommitteeSelectionJson
	err := json.Unmarshal(input, &resJson)
	if err != nil {
		return errors.Wrap(err, "failed to unmarshal sync committee selection")
	}

	slot, err := strconv.ParseUint(resJson.Slot, 10, 64)
	if err != nil {
		return errors.Wrap(err, "failed to parse slot")
	}

	vIdx, err := strconv.ParseUint(resJson.ValidatorIndex, 10, 64)
	if err != nil {
		return errors.Wrap(err, "failed to parse validator index")
	}

	subcommIdx, err := strconv.ParseUint(resJson.SubcommitteeIndex, 10, 64)
	if err != nil {
		return errors.Wrap(err, "failed to parse subcommittee index")
	}

	selectionProof, err := hexutil.Decode(resJson.SelectionProof)
	if err != nil {
		return errors.Wrap(err, "failed to parse selection proof")
	}

	s.Slot = primitives.Slot(slot)
	s.SelectionProof = selectionProof
	s.ValidatorIndex = primitives.ValidatorIndex(vIdx)
	s.SubcommitteeIndex = primitives.CommitteeIndex(subcommIdx)

	return nil
}

type AggregatedSelectionResponse struct {
	Data []BeaconCommitteeSelection `json:"data"`
}

type AggregatedSyncSelectionResponse struct {
	Data []SyncCommitteeSelection `json:"data"`
}

type AttesterDuty struct {
	CommitteeIndex          primitives.CommitteeIndex
	Slot                    primitives.Slot
	CommitteeLength         uint64
	ValidatorCommitteeIndex uint64
	CommitteesAtSlot        uint64
}

type ValidatorForDuty struct {
	Pubkey []byte
	Index  primitives.ValidatorIndex
	Status ethpb.ValidatorStatus
}

type ValidatorCount struct {
	Status string
	Count  uint64
}
@@ -1,34 +0,0 @@
package beacon

import (
	"bytes"
	"fmt"
	"text/template"
)

type templateFn func(StateOrBlockId) string

var getBlockRootTpl templateFn
var getForkTpl templateFn

func init() {
	// idTemplate is used to create template functions that can interpolate StateOrBlockId values.
	idTemplate := func(ts string) func(StateOrBlockId) string {
		t := template.Must(template.New("").Parse(ts))
		f := func(id StateOrBlockId) string {
			b := bytes.NewBuffer(nil)
			err := t.Execute(b, struct{ Id string }{Id: string(id)})
			if err != nil {
				panic(fmt.Sprintf("invalid idTemplate: %s", ts))
			}
			return b.String()
		}
		// Run the template once to ensure that it is valid.
		// This should happen at load time (using package-scoped vars) so that runtime errors aren't possible.
		_ = f(IdGenesis)
		return f
	}

	getBlockRootTpl = idTemplate(getBlockRootPath)
	getForkTpl = idTemplate(getForkForStatePath)
}
@@ -1,146 +0,0 @@
|
||||
load("@prysm//tools/go:def.bzl", "go_library", "go_test")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"attestation_data.go",
|
||||
"beacon_api_helpers.go",
|
||||
"beacon_block_converter.go",
|
||||
"beacon_block_json_helpers.go",
|
||||
"beacon_block_proto_helpers.go",
|
||||
"client.go",
|
||||
"domain_data.go",
|
||||
"doppelganger.go",
|
||||
"duties.go",
|
||||
"get_beacon_block.go",
|
||||
"grpc_client.go",
|
||||
"index.go",
|
||||
"interfaces.go",
|
||||
"log.go",
|
||||
"metrics.go",
|
||||
"prepare_beacon_proposer.go",
|
||||
"propose_attestation.go",
|
||||
"propose_beacon_block.go",
|
||||
"propose_exit.go",
|
||||
"registration.go",
|
||||
"rest_client.go",
|
||||
"status.go",
|
||||
"stream_blocks.go",
|
||||
"submit_aggregate_selection_proof.go",
|
||||
"submit_signed_aggregate_proof.go",
|
||||
"submit_signed_contribution_and_proof.go",
|
||||
"subscribe_committee_subnets.go",
|
||||
"sync_committee.go",
|
||||
"sync_committee_selections.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon/validator_api",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//api/client:go_default_library",
|
||||
"//api/client/apiutil:go_default_library",
|
||||
"//api/client/beacon:go_default_library",
|
||||
"//api/client/beacon/node:go_default_library",
|
||||
"//api/client/beacon/prysm_api:go_default_library",
|
||||
"//api/client/beacon/shared_providers:go_default_library",
|
||||
"//api/client/event:go_default_library",
|
||||
"//api/server/structs:go_default_library",
|
||||
"//beacon-chain/core/helpers:go_default_library",
|
||||
"//beacon-chain/core/signing:go_default_library",
|
||||
"//config/features:go_default_library",
|
||||
"//config/params:go_default_library",
|
||||
"//consensus-types/primitives:go_default_library",
|
||||
"//consensus-types/validator:go_default_library",
|
||||
"//encoding/bytesutil:go_default_library",
|
||||
"//monitoring/tracing/trace:go_default_library",
|
||||
"//network/forks:go_default_library",
|
||||
"//network/httputil:go_default_library",
|
||||
"//proto/engine/v1:go_default_library",
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"//runtime/version:go_default_library",
|
||||
"//time/slots:go_default_library",
|
||||
"//validator/helpers:go_default_library",
|
||||
"@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
|
||||
"@com_github_golang_protobuf//ptypes/empty",
|
||||
"@com_github_pkg_errors//:go_default_library",
|
||||
"@com_github_prometheus_client_golang//prometheus:go_default_library",
|
||||
"@com_github_prometheus_client_golang//prometheus/promauto:go_default_library",
|
||||
"@com_github_sirupsen_logrus//:go_default_library",
|
||||
"@org_golang_google_grpc//:go_default_library",
|
||||
"@org_golang_x_sync//errgroup:go_default_library",
|
||||
],
|
||||
)
|
||||
|
||||
go_test(
|
||||
name = "go_default_test",
|
||||
srcs = [
|
||||
"attestation_data_test.go",
|
||||
"beacon_api_helpers_test.go",
|
||||
"beacon_block_converter_test.go",
|
||||
"beacon_block_json_helpers_test.go",
|
||||
"beacon_block_proto_helpers_test.go",
|
||||
"domain_data_test.go",
|
||||
"doppelganger_test.go",
|
||||
"duties_test.go",
|
||||
"get_beacon_block_test.go",
|
||||
"grpc_client_test.go",
|
||||
"index_test.go",
|
||||
"prepare_beacon_proposer_test.go",
|
||||
"propose_attestation_test.go",
|
||||
"propose_beacon_block_altair_test.go",
|
||||
"propose_beacon_block_blinded_bellatrix_test.go",
|
||||
"propose_beacon_block_blinded_capella_test.go",
|
||||
"propose_beacon_block_blinded_deneb_test.go",
|
||||
"propose_beacon_block_blinded_electra_test.go",
|
||||
"propose_beacon_block_blinded_fulu_test.go",
|
||||
"propose_beacon_block_capella_test.go",
|
||||
"propose_beacon_block_deneb_test.go",
|
||||
"propose_beacon_block_electra_test.go",
|
||||
"propose_beacon_block_fulu_test.go",
|
||||
"propose_beacon_block_phase0_test.go",
|
||||
"propose_beacon_block_test.go",
|
||||
"propose_exit_test.go",
|
||||
"registration_test.go",
|
||||
"rest_client_test.go",
|
||||
"status_test.go",
|
||||
"stream_blocks_test.go",
|
||||
"submit_aggregate_selection_proof_test.go",
|
||||
"submit_signed_aggregate_proof_test.go",
|
||||
"submit_signed_contribution_and_proof_test.go",
|
||||
"subscribe_committee_subnets_test.go",
|
||||
"sync_committee_selections_test.go",
|
||||
"sync_committee_test.go",
|
||||
"wait_for_chain_start_test.go",
|
||||
],
|
||||
embed = [":go_default_library"],
|
||||
deps = [
|
||||
"//api/client:go_default_library",
|
||||
"//api/client/apiutil:go_default_library",
|
||||
"//api/client/beacon:go_default_library",
|
||||
"//api/client/beacon/mock:go_default_library",
|
||||
"//api/client/beacon/node:go_default_library",
|
||||
"//api/client/beacon/prysm_api:go_default_library",
|
||||
"//api/client/beacon/shared_providers:go_default_library",
|
||||
"//api/client/beacon/validator_api/test_helpers:go_default_library",
|
||||
"//api/client/event:go_default_library",
|
||||
"//api/server/structs:go_default_library",
|
||||
"//beacon-chain/core/helpers:go_default_library",
|
||||
"//beacon-chain/rpc/eth/shared/testing:go_default_library",
|
||||
"//config/params:go_default_library",
|
||||
"//consensus-types/primitives:go_default_library",
|
||||
"//encoding/bytesutil:go_default_library",
|
||||
"//network/httputil:go_default_library",
|
||||
"//proto/engine/v1:go_default_library",
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"//runtime/version:go_default_library",
|
||||
"//testing/assert:go_default_library",
|
||||
"//testing/mock:go_default_library",
|
||||
"//testing/require:go_default_library",
|
||||
"//time/slots:go_default_library",
|
||||
"@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
|
||||
"@com_github_golang_protobuf//ptypes/empty",
|
||||
"@com_github_pkg_errors//:go_default_library",
|
||||
"@com_github_sirupsen_logrus//hooks/test:go_default_library",
|
||||
"@org_golang_google_protobuf//types/known/emptypb:go_default_library",
|
||||
"@org_uber_go_mock//gomock:go_default_library",
|
||||
],
|
||||
)
|
||||
@@ -1,107 +0,0 @@
package validator_api

import (
	"context"
	"net/url"
	"strconv"

	"github.com/ethereum/go-ethereum/common/hexutil"
	"github.com/pkg/errors"
	"github.com/prysmaticlabs/prysm/v5/api/apiutil"
	"github.com/prysmaticlabs/prysm/v5/api/server/structs"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

func (c *beaconApiValidatorClient) attestationData(
	ctx context.Context,
	reqSlot primitives.Slot,
	reqCommitteeIndex primitives.CommitteeIndex,
) (*ethpb.AttestationData, error) {
	params := url.Values{}
	params.Add("slot", strconv.FormatUint(uint64(reqSlot), 10))
	params.Add("committee_index", strconv.FormatUint(uint64(reqCommitteeIndex), 10))

	query := apiutil.BuildURL("/eth/v1/validator/attestation_data", params)
	produceAttestationDataResponseJson := structs.GetAttestationDataResponse{}

	if err := c.jsonRestHandler.Get(ctx, query, &produceAttestationDataResponseJson); err != nil {
		return nil, err
	}

	if produceAttestationDataResponseJson.Data == nil {
		return nil, errors.New("attestation data is nil")
	}

	attestationData := produceAttestationDataResponseJson.Data
	committeeIndex, err := strconv.ParseUint(attestationData.CommitteeIndex, 10, 64)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse attestation committee index: %s", attestationData.CommitteeIndex)
	}

	if !apiutil.ValidRoot(attestationData.BeaconBlockRoot) {
		return nil, errors.Errorf("invalid beacon block root: %s", attestationData.BeaconBlockRoot)
	}

	beaconBlockRoot, err := hexutil.Decode(attestationData.BeaconBlockRoot)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode beacon block root: %s", attestationData.BeaconBlockRoot)
	}

	slot, err := strconv.ParseUint(attestationData.Slot, 10, 64)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse attestation slot: %s", attestationData.Slot)
	}

	if attestationData.Source == nil {
		return nil, errors.New("attestation source is nil")
	}

	sourceEpoch, err := strconv.ParseUint(attestationData.Source.Epoch, 10, 64)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse attestation source epoch: %s", attestationData.Source.Epoch)
	}

	if !apiutil.ValidRoot(attestationData.Source.Root) {
		return nil, errors.Errorf("invalid attestation source root: %s", attestationData.Source.Root)
	}

	sourceRoot, err := hexutil.Decode(attestationData.Source.Root)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode attestation source root: %s", attestationData.Source.Root)
	}

	if attestationData.Target == nil {
		return nil, errors.New("attestation target is nil")
	}

	targetEpoch, err := strconv.ParseUint(attestationData.Target.Epoch, 10, 64)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse attestation target epoch: %s", attestationData.Target.Epoch)
	}

	if !apiutil.ValidRoot(attestationData.Target.Root) {
		return nil, errors.Errorf("invalid attestation target root: %s", attestationData.Target.Root)
	}

	targetRoot, err := hexutil.Decode(attestationData.Target.Root)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode attestation target root: %s", attestationData.Target.Root)
	}

	response := &ethpb.AttestationData{
		BeaconBlockRoot: beaconBlockRoot,
		CommitteeIndex:  primitives.CommitteeIndex(committeeIndex),
		Slot:            primitives.Slot(slot),
		Source: &ethpb.Checkpoint{
			Epoch: primitives.Epoch(sourceEpoch),
			Root:  sourceRoot,
		},
		Target: &ethpb.Checkpoint{
			Epoch: primitives.Epoch(targetEpoch),
			Root:  targetRoot,
		},
	}

	return response, nil
}
@@ -1,243 +0,0 @@
|
||||
package validator_api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
"go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
func TestGetAttestationData_ValidAttestation(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
expectedSlot := uint64(5)
|
||||
expectedCommitteeIndex := uint64(6)
|
||||
expectedBeaconBlockRoot := "0x0636045df9bdda3ab96592cf5389032c8ec3977f911e2b53509b348dfe164d4d"
|
||||
expectedSourceEpoch := uint64(7)
|
||||
expectedSourceRoot := "0xd4bcbdefc8156e85247681086e8050e5d2d5d1bf076a25f6decd99250f3a378d"
|
||||
expectedTargetEpoch := uint64(8)
|
||||
expectedTargetRoot := "0x246590e8e4c2a9bd13cc776ecc7025bc432219f076e80b27267b8fa0456dc821"
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
produceAttestationDataResponseJson := structs.GetAttestationDataResponse{}
|
||||
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("/eth/v1/validator/attestation_data?committee_index=%d&slot=%d", expectedCommitteeIndex, expectedSlot),
|
||||
&produceAttestationDataResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
structs.GetAttestationDataResponse{
|
||||
Data: &structs.AttestationData{
|
||||
Slot: strconv.FormatUint(expectedSlot, 10),
|
||||
CommitteeIndex: strconv.FormatUint(expectedCommitteeIndex, 10),
|
||||
BeaconBlockRoot: expectedBeaconBlockRoot,
|
||||
Source: &structs.Checkpoint{
|
||||
Epoch: strconv.FormatUint(expectedSourceEpoch, 10),
|
||||
Root: expectedSourceRoot,
|
||||
},
|
||||
Target: &structs.Checkpoint{
|
||||
Epoch: strconv.FormatUint(expectedTargetEpoch, 10),
|
||||
Root: expectedTargetRoot,
|
||||
},
|
||||
},
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
validatorClient := &beaconApiValidatorClient{jsonRestHandler: jsonRestHandler}
|
||||
resp, err := validatorClient.attestationData(ctx, primitives.Slot(expectedSlot), primitives.CommitteeIndex(expectedCommitteeIndex))
|
||||
assert.NoError(t, err)
|
||||
|
||||
require.NotNil(t, resp)
|
||||
assert.Equal(t, expectedBeaconBlockRoot, hexutil.Encode(resp.BeaconBlockRoot))
|
||||
assert.Equal(t, expectedCommitteeIndex, uint64(resp.CommitteeIndex))
|
||||
assert.Equal(t, expectedSlot, uint64(resp.Slot))
|
||||
|
||||
require.NotNil(t, resp.Source)
|
||||
assert.Equal(t, expectedSourceEpoch, uint64(resp.Source.Epoch))
|
||||
assert.Equal(t, expectedSourceRoot, hexutil.Encode(resp.Source.Root))
|
||||
|
||||
require.NotNil(t, resp.Target)
|
||||
assert.Equal(t, expectedTargetEpoch, uint64(resp.Target.Epoch))
|
||||
assert.Equal(t, expectedTargetRoot, hexutil.Encode(resp.Target.Root))
|
||||
}
|
||||
|
||||
func TestGetAttestationData_InvalidData(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
generateData func() structs.GetAttestationDataResponse
|
||||
expectedErrorMessage string
|
||||
}{
|
||||
{
|
||||
name: "nil attestation data",
|
||||
generateData: func() structs.GetAttestationDataResponse {
|
||||
return structs.GetAttestationDataResponse{
|
||||
Data: nil,
|
||||
}
|
||||
},
|
||||
expectedErrorMessage: "attestation data is nil",
|
||||
},
|
||||
{
|
||||
name: "invalid committee index",
|
||||
generateData: func() structs.GetAttestationDataResponse {
|
||||
attestation := generateValidAttestation(1, 2)
|
||||
attestation.Data.CommitteeIndex = "foo"
|
||||
return attestation
|
||||
},
|
||||
expectedErrorMessage: "failed to parse attestation committee index: foo",
|
||||
},
|
||||
{
|
||||
name: "invalid block root",
|
||||
generateData: func() structs.GetAttestationDataResponse {
|
||||
attestation := generateValidAttestation(1, 2)
|
||||
attestation.Data.BeaconBlockRoot = "foo"
|
||||
return attestation
|
||||
},
|
||||
expectedErrorMessage: "invalid beacon block root: foo",
|
||||
},
|
||||
{
|
||||
name: "invalid slot",
|
||||
generateData: func() structs.GetAttestationDataResponse {
|
||||
attestation := generateValidAttestation(1, 2)
|
||||
attestation.Data.Slot = "foo"
|
||||
return attestation
|
||||
},
|
||||
expectedErrorMessage: "failed to parse attestation slot: foo",
|
||||
},
|
||||
{
|
||||
name: "nil source",
|
||||
generateData: func() structs.GetAttestationDataResponse {
|
||||
attestation := generateValidAttestation(1, 2)
|
||||
attestation.Data.Source = nil
|
||||
return attestation
|
||||
},
|
||||
expectedErrorMessage: "attestation source is nil",
|
||||
},
|
||||
{
|
||||
name: "invalid source epoch",
|
||||
generateData: func() structs.GetAttestationDataResponse {
|
||||
attestation := generateValidAttestation(1, 2)
|
||||
attestation.Data.Source.Epoch = "foo"
|
||||
return attestation
|
||||
},
|
||||
expectedErrorMessage: "failed to parse attestation source epoch: foo",
|
||||
},
|
||||
{
|
||||
name: "invalid source root",
|
||||
generateData: func() structs.GetAttestationDataResponse {
|
||||
attestation := generateValidAttestation(1, 2)
|
||||
attestation.Data.Source.Root = "foo"
|
||||
return attestation
|
||||
},
|
||||
expectedErrorMessage: "invalid attestation source root: foo",
|
||||
},
|
||||
{
|
||||
name: "nil target",
|
||||
generateData: func() structs.GetAttestationDataResponse {
|
||||
attestation := generateValidAttestation(1, 2)
|
||||
attestation.Data.Target = nil
|
||||
return attestation
|
||||
},
|
||||
expectedErrorMessage: "attestation target is nil",
|
||||
},
|
||||
{
|
||||
name: "invalid target epoch",
|
||||
generateData: func() structs.GetAttestationDataResponse {
|
||||
attestation := generateValidAttestation(1, 2)
|
||||
attestation.Data.Target.Epoch = "foo"
|
||||
return attestation
|
||||
},
|
||||
expectedErrorMessage: "failed to parse attestation target epoch: foo",
|
||||
},
|
||||
{
|
||||
name: "invalid target root",
|
||||
generateData: func() structs.GetAttestationDataResponse {
|
||||
attestation := generateValidAttestation(1, 2)
|
||||
attestation.Data.Target.Root = "foo"
|
||||
return attestation
|
||||
},
|
||||
expectedErrorMessage: "invalid attestation target root: foo",
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
produceAttestationDataResponseJson := structs.GetAttestationDataResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
"/eth/v1/validator/attestation_data?committee_index=2&slot=1",
|
||||
&produceAttestationDataResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
testCase.generateData(),
|
||||
).Times(1)
|
||||
|
||||
validatorClient := &beaconApiValidatorClient{jsonRestHandler: jsonRestHandler}
|
||||
_, err := validatorClient.attestationData(ctx, 1, 2)
|
||||
assert.ErrorContains(t, testCase.expectedErrorMessage, err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetAttestationData_JsonResponseError(t *testing.T) {
|
||||
const slot = primitives.Slot(1)
|
||||
const committeeIndex = primitives.CommitteeIndex(2)
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
produceAttestationDataResponseJson := structs.GetAttestationDataResponse{}
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
fmt.Sprintf("/eth/v1/validator/attestation_data?committee_index=%d&slot=%d", committeeIndex, slot),
|
||||
&produceAttestationDataResponseJson,
|
||||
).Return(
|
||||
errors.New("some specific json response error"),
|
||||
).Times(1)
|
||||
|
||||
validatorClient := &beaconApiValidatorClient{jsonRestHandler: jsonRestHandler}
|
||||
_, err := validatorClient.attestationData(ctx, slot, committeeIndex)
|
||||
assert.ErrorContains(t, "some specific json response error", err)
|
||||
}
|
||||
|
||||
func generateValidAttestation(slot, committeeIndex uint64) structs.GetAttestationDataResponse {
|
||||
return structs.GetAttestationDataResponse{
|
||||
Data: &structs.AttestationData{
|
||||
Slot: strconv.FormatUint(slot, 10),
|
||||
CommitteeIndex: strconv.FormatUint(committeeIndex, 10),
|
||||
BeaconBlockRoot: "0x5ecf3bff35e39d5f75476d42950d549f81fa93038c46b6652ae89ae1f7ad834f",
|
||||
Source: &structs.Checkpoint{
|
||||
Epoch: "3",
|
||||
Root: "0x9023c9e64f23c1d451d5073c641f5f69597c2ad7d82f6f16e67d703e0ce5db8b",
|
||||
},
|
||||
Target: &structs.Checkpoint{
|
||||
Epoch: "4",
|
||||
Root: "0xb154d46803b15b458ca822466547b054bc124338c6ee1d9c433dcde8c4457cca",
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1,97 +0,0 @@
package validator_api

import (
	"bytes"
	"context"
	"encoding/json"
	"strconv"

	"github.com/pkg/errors"
	"github.com/prysmaticlabs/prysm/v5/api/server/structs"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

var beaconAPITogRPCValidatorStatus = map[string]ethpb.ValidatorStatus{
	"pending_initialized": ethpb.ValidatorStatus_DEPOSITED,
	"pending_queued":      ethpb.ValidatorStatus_PENDING,
	"active_ongoing":      ethpb.ValidatorStatus_ACTIVE,
	"active_exiting":      ethpb.ValidatorStatus_EXITING,
	"active_slashed":      ethpb.ValidatorStatus_SLASHING,
	"exited_unslashed":    ethpb.ValidatorStatus_EXITED,
	"exited_slashed":      ethpb.ValidatorStatus_EXITED,
	"withdrawal_possible": ethpb.ValidatorStatus_EXITED,
	"withdrawal_done":     ethpb.ValidatorStatus_EXITED,
}

func (c *beaconApiValidatorClient) fork(ctx context.Context) (*structs.GetStateForkResponse, error) {
	const endpoint = "/eth/v1/beacon/states/head/fork"

	stateForkResponseJson := &structs.GetStateForkResponse{}

	if err := c.jsonRestHandler.Get(ctx, endpoint, stateForkResponseJson); err != nil {
		return nil, err
	}

	return stateForkResponseJson, nil
}

func (c *beaconApiValidatorClient) headers(ctx context.Context) (*structs.GetBlockHeadersResponse, error) {
	const endpoint = "/eth/v1/beacon/headers"

	blockHeadersResponseJson := &structs.GetBlockHeadersResponse{}

	if err := c.jsonRestHandler.Get(ctx, endpoint, blockHeadersResponseJson); err != nil {
		return nil, err
	}

	return blockHeadersResponseJson, nil
}

func (c *beaconApiValidatorClient) liveness(ctx context.Context, epoch primitives.Epoch, validatorIndexes []string) (*structs.GetLivenessResponse, error) {
	const endpoint = "/eth/v1/validator/liveness/"
	url := endpoint + strconv.FormatUint(uint64(epoch), 10)

	livenessResponseJson := &structs.GetLivenessResponse{}

	marshalledJsonValidatorIndexes, err := json.Marshal(validatorIndexes)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to marshal validator indexes")
	}

	if err = c.jsonRestHandler.Post(ctx, url, nil, bytes.NewBuffer(marshalledJsonValidatorIndexes), livenessResponseJson); err != nil {
		return nil, err
	}

	return livenessResponseJson, nil
}

func (c *beaconApiValidatorClient) syncing(ctx context.Context) (*structs.SyncStatusResponse, error) {
	const endpoint = "/eth/v1/node/syncing"

	syncingResponseJson := &structs.SyncStatusResponse{}

	if err := c.jsonRestHandler.Get(ctx, endpoint, syncingResponseJson); err != nil {
		return nil, err
	}

	return syncingResponseJson, nil
}

func (c *beaconApiValidatorClient) isSyncing(ctx context.Context) (bool, error) {
	response, err := c.syncing(ctx)
	if err != nil || response == nil || response.Data == nil {
		return true, errors.Wrapf(err, "failed to get syncing status")
	}

	return response.Data.IsSyncing, err
}

func (c *beaconApiValidatorClient) isOptimistic(ctx context.Context) (bool, error) {
	response, err := c.syncing(ctx)
	if err != nil || response == nil || response.Data == nil {
		return true, errors.Wrapf(err, "failed to get syncing status")
	}

	return response.Data.IsOptimistic, err
}
@@ -1,373 +0,0 @@
|
||||
package validator_api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"net/url"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v5/api/apiutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/mock"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
"go.uber.org/mock/gomock"
|
||||
)
|
||||
|
||||
func TestBeaconApiHelpers(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
valid bool
|
||||
}{
|
||||
{
|
||||
name: "correct format",
|
||||
input: "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
valid: true,
|
||||
},
|
||||
{
|
||||
name: "root too small",
|
||||
input: "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f",
|
||||
valid: false,
|
||||
},
|
||||
{
|
||||
name: "root too big",
|
||||
input: "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f22",
|
||||
valid: false,
|
||||
},
|
||||
{
|
||||
name: "empty root",
|
||||
input: "",
|
||||
valid: false,
|
||||
},
|
||||
{
|
||||
name: "no 0x prefix",
|
||||
input: "cf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
valid: false,
|
||||
},
|
||||
{
|
||||
name: "invalid characters",
|
||||
input: "0xzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz",
|
||||
valid: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
assert.Equal(t, tt.valid, apiutil.ValidRoot(tt.input))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBeaconApiHelpers_TestUint64ToString(t *testing.T) {
|
||||
const expectedResult = "1234"
|
||||
const val = uint64(1234)
|
||||
|
||||
assert.Equal(t, expectedResult, apiutil.Uint64ToString(val))
|
||||
assert.Equal(t, expectedResult, apiutil.Uint64ToString(primitives.Slot(val)))
|
||||
assert.Equal(t, expectedResult, apiutil.Uint64ToString(primitives.ValidatorIndex(val)))
|
||||
assert.Equal(t, expectedResult, apiutil.Uint64ToString(primitives.CommitteeIndex(val)))
|
||||
assert.Equal(t, expectedResult, apiutil.Uint64ToString(primitives.Epoch(val)))
|
||||
}
|
||||
|
||||
func TestBuildURL_NoParams(t *testing.T) {
|
||||
wanted := "/aaa/bbb/ccc"
|
||||
actual := apiutil.BuildURL("/aaa/bbb/ccc")
|
||||
assert.Equal(t, wanted, actual)
|
||||
}
|
||||
|
||||
func TestBuildURL_WithParams(t *testing.T) {
|
||||
params := url.Values{}
|
||||
params.Add("xxxx", "1")
|
||||
params.Add("yyyy", "2")
|
||||
params.Add("zzzz", "3")
|
||||
|
||||
wanted := "/aaa/bbb/ccc?xxxx=1&yyyy=2&zzzz=3"
|
||||
actual := apiutil.BuildURL("/aaa/bbb/ccc", params)
|
||||
assert.Equal(t, wanted, actual)
|
||||
}
|
||||
|
||||
const forkEndpoint = "/eth/v1/beacon/states/head/fork"
|
||||
|
||||
func TestGetFork_Nominal(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
stateForkResponseJson := structs.GetStateForkResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
expected := structs.GetStateForkResponse{
|
||||
Data: &structs.Fork{
|
||||
PreviousVersion: "0x1",
|
||||
CurrentVersion: "0x2",
|
||||
Epoch: "3",
|
||||
},
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
forkEndpoint,
|
||||
&stateForkResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
expected,
|
||||
).Times(1)
|
||||
|
||||
validatorClient := beaconApiValidatorClient{
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
|
||||
fork, err := validatorClient.fork(ctx)
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, &expected, fork)
|
||||
}
|
||||
|
||||
func TestGetFork_Invalid(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
forkEndpoint,
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
errors.New("custom error"),
|
||||
).Times(1)
|
||||
|
||||
validatorClient := beaconApiValidatorClient{
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
|
||||
_, err := validatorClient.fork(ctx)
|
||||
require.ErrorContains(t, "custom error", err)
|
||||
}
|
||||
|
||||
const headersEndpoint = "/eth/v1/beacon/headers"
|
||||
|
||||
func TestGetHeaders_Nominal(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
blockHeadersResponseJson := structs.GetBlockHeadersResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
expected := structs.GetBlockHeadersResponse{
|
||||
Data: []*structs.SignedBeaconBlockHeaderContainer{
|
||||
{
|
||||
Header: &structs.SignedBeaconBlockHeader{
|
||||
Message: &structs.BeaconBlockHeader{
|
||||
Slot: "42",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
headersEndpoint,
|
||||
&blockHeadersResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
expected,
|
||||
).Times(1)
|
||||
|
||||
validatorClient := beaconApiValidatorClient{
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
|
||||
headers, err := validatorClient.headers(ctx)
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, &expected, headers)
|
||||
}
|
||||
|
||||
func TestGetHeaders_Invalid(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
headersEndpoint,
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
errors.New("custom error"),
|
||||
).Times(1)
|
||||
|
||||
validatorClient := beaconApiValidatorClient{
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
|
||||
_, err := validatorClient.headers(ctx)
|
||||
require.ErrorContains(t, "custom error", err)
|
||||
}
|
||||
|
||||
const livenessEndpoint = "/eth/v1/validator/liveness/42"
|
||||
|
||||
func TestGetLiveness_Nominal(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
livenessResponseJson := structs.GetLivenessResponse{}
|
||||
|
||||
indexes := []string{"1", "2"}
|
||||
marshalledIndexes, err := json.Marshal(indexes)
|
||||
require.NoError(t, err)
|
||||
|
||||
expected := structs.GetLivenessResponse{
|
||||
Data: []*structs.Liveness{
|
||||
{
|
||||
Index: "1",
|
||||
IsLive: true,
|
||||
},
|
||||
{
|
||||
Index: "2",
|
||||
IsLive: false,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
livenessEndpoint,
|
||||
nil,
|
||||
bytes.NewBuffer(marshalledIndexes),
|
||||
&livenessResponseJson,
|
||||
).SetArg(
|
||||
4,
|
||||
expected,
|
||||
).Return(
|
||||
nil,
|
||||
).Times(1)
|
||||
|
||||
validatorClient := &beaconApiValidatorClient{jsonRestHandler: jsonRestHandler}
|
||||
liveness, err := validatorClient.liveness(ctx, 42, indexes)
|
||||
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, &expected, liveness)
|
||||
}
|
||||
|
||||
func TestGetLiveness_Invalid(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
jsonRestHandler.EXPECT().Post(
|
||||
gomock.Any(),
|
||||
livenessEndpoint,
|
||||
nil,
|
||||
gomock.Any(),
|
||||
gomock.Any(),
|
||||
).Return(
|
||||
errors.New("custom error"),
|
||||
).Times(1)
|
||||
|
||||
validatorClient := &beaconApiValidatorClient{jsonRestHandler: jsonRestHandler}
|
||||
_, err := validatorClient.liveness(ctx, 42, nil)
|
||||
|
||||
require.ErrorContains(t, "custom error", err)
|
||||
}
|
||||
|
||||
const syncingEndpoint = "/eth/v1/node/syncing"
|
||||
|
||||
func TestGetIsSyncing_Nominal(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
isSyncing bool
|
||||
}{
|
||||
{
|
||||
name: "Syncing",
|
||||
isSyncing: true,
|
||||
},
|
||||
{
|
||||
name: "Not syncing",
|
||||
isSyncing: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
syncingResponseJson := structs.SyncStatusResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
expected := structs.SyncStatusResponse{
|
||||
Data: &structs.SyncStatusResponseData{
|
||||
IsSyncing: testCase.isSyncing,
|
||||
},
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
syncingEndpoint,
|
||||
&syncingResponseJson,
|
||||
).Return(
|
||||
nil,
|
||||
).SetArg(
|
||||
2,
|
||||
expected,
|
||||
).Times(1)
|
||||
|
||||
validatorClient := beaconApiValidatorClient{
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
|
||||
isSyncing, err := validatorClient.isSyncing(ctx)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, testCase.isSyncing, isSyncing)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetIsSyncing_Invalid(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
syncingResponseJson := structs.SyncStatusResponse{}
|
||||
jsonRestHandler := mock.NewMockJsonRestHandler(ctrl)
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
jsonRestHandler.EXPECT().Get(
|
||||
gomock.Any(),
|
||||
syncingEndpoint,
|
||||
&syncingResponseJson,
|
||||
).Return(
|
||||
errors.New("custom error"),
|
||||
).Times(1)
|
||||
|
||||
validatorClient := beaconApiValidatorClient{
|
||||
jsonRestHandler: jsonRestHandler,
|
||||
}
|
||||
|
||||
isSyncing, err := validatorClient.isSyncing(ctx)
|
||||
assert.Equal(t, true, isSyncing)
|
||||
assert.ErrorContains(t, "failed to get syncing status", err)
|
||||
}
|
||||
@@ -1,423 +0,0 @@
|
||||
package validator_api
|
||||
|
||||
import (
|
||||
"math/big"
|
||||
"strconv"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/bytesutil"
|
||||
enginev1 "github.com/prysmaticlabs/prysm/v5/proto/engine/v1"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
)
|
||||
|
||||
type BeaconBlockConverter interface {
|
||||
ConvertRESTPhase0BlockToProto(block *structs.BeaconBlock) (*ethpb.BeaconBlock, error)
|
||||
ConvertRESTAltairBlockToProto(block *structs.BeaconBlockAltair) (*ethpb.BeaconBlockAltair, error)
|
||||
ConvertRESTBellatrixBlockToProto(block *structs.BeaconBlockBellatrix) (*ethpb.BeaconBlockBellatrix, error)
|
||||
ConvertRESTCapellaBlockToProto(block *structs.BeaconBlockCapella) (*ethpb.BeaconBlockCapella, error)
|
||||
}
|
||||
|
||||
type beaconApiBeaconBlockConverter struct{}
|
||||
|
||||
// ConvertRESTPhase0BlockToProto converts a Phase0 JSON beacon block to its protobuf equivalent
|
||||
func (c beaconApiBeaconBlockConverter) ConvertRESTPhase0BlockToProto(block *structs.BeaconBlock) (*ethpb.BeaconBlock, error) {
|
||||
blockSlot, err := strconv.ParseUint(block.Slot, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse slot `%s`", block.Slot)
|
||||
}
|
||||
|
||||
blockProposerIndex, err := strconv.ParseUint(block.ProposerIndex, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse proposer index `%s`", block.ProposerIndex)
|
||||
}
|
||||
|
||||
parentRoot, err := hexutil.Decode(block.ParentRoot)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode parent root `%s`", block.ParentRoot)
|
||||
}
|
||||
|
||||
stateRoot, err := hexutil.Decode(block.StateRoot)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode state root `%s`", block.StateRoot)
|
||||
}
|
||||
|
||||
if block.Body == nil {
|
||||
return nil, errors.New("block body is nil")
|
||||
}
|
||||
|
||||
randaoReveal, err := hexutil.Decode(block.Body.RandaoReveal)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode randao reveal `%s`", block.Body.RandaoReveal)
|
||||
}
|
||||
|
||||
if block.Body.Eth1Data == nil {
|
||||
return nil, errors.New("eth1 data is nil")
|
||||
}
|
||||
|
||||
depositRoot, err := hexutil.Decode(block.Body.Eth1Data.DepositRoot)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode deposit root `%s`", block.Body.Eth1Data.DepositRoot)
|
||||
}
|
||||
|
||||
depositCount, err := strconv.ParseUint(block.Body.Eth1Data.DepositCount, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse deposit count `%s`", block.Body.Eth1Data.DepositCount)
|
||||
}
|
||||
|
||||
blockHash, err := hexutil.Decode(block.Body.Eth1Data.BlockHash)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode block hash `%s`", block.Body.Eth1Data.BlockHash)
|
||||
}
|
||||
|
||||
graffiti, err := hexutil.Decode(block.Body.Graffiti)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode graffiti `%s`", block.Body.Graffiti)
|
||||
}
|
||||
|
||||
proposerSlashings, err := convertProposerSlashingsToProto(block.Body.ProposerSlashings)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get proposer slashings")
|
||||
}
|
||||
|
||||
attesterSlashings, err := convertAttesterSlashingsToProto(block.Body.AttesterSlashings)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get attester slashings")
|
||||
}
|
||||
|
||||
attestations, err := convertAttestationsToProto(block.Body.Attestations)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get attestations")
|
||||
}
|
||||
|
||||
deposits, err := convertDepositsToProto(block.Body.Deposits)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get deposits")
|
||||
}
|
||||
|
||||
voluntaryExits, err := convertVoluntaryExitsToProto(block.Body.VoluntaryExits)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get voluntary exits")
|
||||
}
|
||||
|
||||
return ðpb.BeaconBlock{
|
||||
Slot: primitives.Slot(blockSlot),
|
||||
ProposerIndex: primitives.ValidatorIndex(blockProposerIndex),
|
||||
ParentRoot: parentRoot,
|
||||
StateRoot: stateRoot,
|
||||
Body: ðpb.BeaconBlockBody{
|
||||
RandaoReveal: randaoReveal,
|
||||
Eth1Data: ðpb.Eth1Data{
|
||||
DepositRoot: depositRoot,
|
||||
DepositCount: depositCount,
|
||||
BlockHash: blockHash,
|
||||
},
|
||||
Graffiti: graffiti,
|
||||
ProposerSlashings: proposerSlashings,
|
||||
AttesterSlashings: attesterSlashings,
|
||||
Attestations: attestations,
|
||||
Deposits: deposits,
|
||||
VoluntaryExits: voluntaryExits,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// ConvertRESTAltairBlockToProto converts an Altair JSON beacon block to its protobuf equivalent
|
||||
func (c beaconApiBeaconBlockConverter) ConvertRESTAltairBlockToProto(block *structs.BeaconBlockAltair) (*ethpb.BeaconBlockAltair, error) {
|
||||
if block.Body == nil {
|
||||
return nil, errors.New("block body is nil")
|
||||
}
|
||||
|
||||
// Call convertRESTPhase0BlockToProto to set the phase0 fields because all the error handling and the heavy lifting
|
||||
// has already been done
|
||||
phase0Block, err := c.ConvertRESTPhase0BlockToProto(&structs.BeaconBlock{
|
||||
Slot: block.Slot,
|
||||
ProposerIndex: block.ProposerIndex,
|
||||
ParentRoot: block.ParentRoot,
|
||||
StateRoot: block.StateRoot,
|
||||
Body: &structs.BeaconBlockBody{
|
||||
RandaoReveal: block.Body.RandaoReveal,
|
||||
Eth1Data: block.Body.Eth1Data,
|
||||
Graffiti: block.Body.Graffiti,
|
||||
ProposerSlashings: block.Body.ProposerSlashings,
|
||||
AttesterSlashings: block.Body.AttesterSlashings,
|
||||
Attestations: block.Body.Attestations,
|
||||
Deposits: block.Body.Deposits,
|
||||
VoluntaryExits: block.Body.VoluntaryExits,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get the phase0 fields of the altair block")
|
||||
}
|
||||
|
||||
if block.Body.SyncAggregate == nil {
|
||||
return nil, errors.New("sync aggregate is nil")
|
||||
}
|
||||
|
||||
syncCommitteeBits, err := hexutil.Decode(block.Body.SyncAggregate.SyncCommitteeBits)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode sync committee bits `%s`", block.Body.SyncAggregate.SyncCommitteeBits)
|
||||
}
|
||||
|
||||
syncCommitteeSignature, err := hexutil.Decode(block.Body.SyncAggregate.SyncCommitteeSignature)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode sync committee signature `%s`", block.Body.SyncAggregate.SyncCommitteeSignature)
|
||||
}
|
||||
|
||||
return ðpb.BeaconBlockAltair{
|
||||
Slot: phase0Block.Slot,
|
||||
ProposerIndex: phase0Block.ProposerIndex,
|
||||
ParentRoot: phase0Block.ParentRoot,
|
||||
StateRoot: phase0Block.StateRoot,
|
||||
Body: ðpb.BeaconBlockBodyAltair{
|
||||
RandaoReveal: phase0Block.Body.RandaoReveal,
|
||||
Eth1Data: phase0Block.Body.Eth1Data,
|
||||
Graffiti: phase0Block.Body.Graffiti,
|
||||
ProposerSlashings: phase0Block.Body.ProposerSlashings,
|
||||
AttesterSlashings: phase0Block.Body.AttesterSlashings,
|
||||
Attestations: phase0Block.Body.Attestations,
|
||||
Deposits: phase0Block.Body.Deposits,
|
||||
VoluntaryExits: phase0Block.Body.VoluntaryExits,
|
||||
SyncAggregate: ðpb.SyncAggregate{
|
||||
SyncCommitteeBits: syncCommitteeBits,
|
||||
SyncCommitteeSignature: syncCommitteeSignature,
|
||||
},
|
||||
},
|
||||
}, nil
|
||||
}
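
// Illustrative sketch, not part of the original file: the Altair converter delegates the shared
// fields to ConvertRESTPhase0BlockToProto and only decodes the sync aggregate itself. The sync
// committee participation is a 512-bit vector, so a well-formed JSON value is a 0x-prefixed hex
// string of 64 bytes; the length check below is an extra illustration, the converter above does
// not enforce it.
func exampleDecodeSyncCommitteeBits(bitsJson string) ([]byte, error) {
    bits, err := hexutil.Decode(bitsJson)
    if err != nil {
        return nil, errors.Wrapf(err, "failed to decode sync committee bits `%s`", bitsJson)
    }
    if len(bits) != 64 {
        return nil, errors.Errorf("expected 64 bytes of sync committee bits, got %d", len(bits))
    }
    return bits, nil
}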
|
||||
|
||||
// ConvertRESTBellatrixBlockToProto converts a Bellatrix JSON beacon block to its protobuf equivalent
|
||||
func (c beaconApiBeaconBlockConverter) ConvertRESTBellatrixBlockToProto(block *structs.BeaconBlockBellatrix) (*ethpb.BeaconBlockBellatrix, error) {
|
||||
if block.Body == nil {
|
||||
return nil, errors.New("block body is nil")
|
||||
}
|
||||
|
||||
// Call ConvertRESTAltairBlockToProto to set the altair fields, because all the error handling
// and the heavy lifting has already been done there
|
||||
altairBlock, err := c.ConvertRESTAltairBlockToProto(&structs.BeaconBlockAltair{
|
||||
Slot: block.Slot,
|
||||
ProposerIndex: block.ProposerIndex,
|
||||
ParentRoot: block.ParentRoot,
|
||||
StateRoot: block.StateRoot,
|
||||
Body: &structs.BeaconBlockBodyAltair{
|
||||
RandaoReveal: block.Body.RandaoReveal,
|
||||
Eth1Data: block.Body.Eth1Data,
|
||||
Graffiti: block.Body.Graffiti,
|
||||
ProposerSlashings: block.Body.ProposerSlashings,
|
||||
AttesterSlashings: block.Body.AttesterSlashings,
|
||||
Attestations: block.Body.Attestations,
|
||||
Deposits: block.Body.Deposits,
|
||||
VoluntaryExits: block.Body.VoluntaryExits,
|
||||
SyncAggregate: block.Body.SyncAggregate,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get the altair fields of the bellatrix block")
|
||||
}
|
||||
|
||||
if block.Body.ExecutionPayload == nil {
|
||||
return nil, errors.New("execution payload is nil")
|
||||
}
|
||||
|
||||
parentHash, err := hexutil.Decode(block.Body.ExecutionPayload.ParentHash)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode execution payload parent hash `%s`", block.Body.ExecutionPayload.ParentHash)
|
||||
}
|
||||
|
||||
feeRecipient, err := hexutil.Decode(block.Body.ExecutionPayload.FeeRecipient)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode execution payload fee recipient `%s`", block.Body.ExecutionPayload.FeeRecipient)
|
||||
}
|
||||
|
||||
stateRoot, err := hexutil.Decode(block.Body.ExecutionPayload.StateRoot)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode execution payload state root `%s`", block.Body.ExecutionPayload.StateRoot)
|
||||
}
|
||||
|
||||
receiptsRoot, err := hexutil.Decode(block.Body.ExecutionPayload.ReceiptsRoot)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode execution payload receipts root `%s`", block.Body.ExecutionPayload.ReceiptsRoot)
|
||||
}
|
||||
|
||||
logsBloom, err := hexutil.Decode(block.Body.ExecutionPayload.LogsBloom)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode execution payload logs bloom `%s`", block.Body.ExecutionPayload.LogsBloom)
|
||||
}
|
||||
|
||||
prevRandao, err := hexutil.Decode(block.Body.ExecutionPayload.PrevRandao)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode execution payload prev randao `%s`", block.Body.ExecutionPayload.PrevRandao)
|
||||
}
|
||||
|
||||
blockNumber, err := strconv.ParseUint(block.Body.ExecutionPayload.BlockNumber, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse execution payload block number `%s`", block.Body.ExecutionPayload.BlockNumber)
|
||||
}
|
||||
|
||||
gasLimit, err := strconv.ParseUint(block.Body.ExecutionPayload.GasLimit, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse execution payload gas limit `%s`", block.Body.ExecutionPayload.GasLimit)
|
||||
}
|
||||
|
||||
gasUsed, err := strconv.ParseUint(block.Body.ExecutionPayload.GasUsed, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse execution payload gas used `%s`", block.Body.ExecutionPayload.GasUsed)
|
||||
}
|
||||
|
||||
timestamp, err := strconv.ParseUint(block.Body.ExecutionPayload.Timestamp, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse execution payload timestamp `%s`", block.Body.ExecutionPayload.Timestamp)
|
||||
}
|
||||
|
||||
extraData, err := hexutil.Decode(block.Body.ExecutionPayload.ExtraData)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode execution payload extra data `%s`", block.Body.ExecutionPayload.ExtraData)
|
||||
}
|
||||
|
||||
baseFeePerGas := new(big.Int)
|
||||
if _, ok := baseFeePerGas.SetString(block.Body.ExecutionPayload.BaseFeePerGas, 10); !ok {
|
||||
return nil, errors.Errorf("failed to parse execution payload base fee per gas `%s`", block.Body.ExecutionPayload.BaseFeePerGas)
|
||||
}
|
||||
|
||||
blockHash, err := hexutil.Decode(block.Body.ExecutionPayload.BlockHash)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to decode execution payload block hash `%s`", block.Body.ExecutionPayload.BlockHash)
|
||||
}
|
||||
|
||||
transactions, err := convertTransactionsToProto(block.Body.ExecutionPayload.Transactions)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get execution payload transactions")
|
||||
}
|
||||
|
||||
return ðpb.BeaconBlockBellatrix{
|
||||
Slot: altairBlock.Slot,
|
||||
ProposerIndex: altairBlock.ProposerIndex,
|
||||
ParentRoot: altairBlock.ParentRoot,
|
||||
StateRoot: altairBlock.StateRoot,
|
||||
Body: ðpb.BeaconBlockBodyBellatrix{
|
||||
RandaoReveal: altairBlock.Body.RandaoReveal,
|
||||
Eth1Data: altairBlock.Body.Eth1Data,
|
||||
Graffiti: altairBlock.Body.Graffiti,
|
||||
ProposerSlashings: altairBlock.Body.ProposerSlashings,
|
||||
AttesterSlashings: altairBlock.Body.AttesterSlashings,
|
||||
Attestations: altairBlock.Body.Attestations,
|
||||
Deposits: altairBlock.Body.Deposits,
|
||||
VoluntaryExits: altairBlock.Body.VoluntaryExits,
|
||||
SyncAggregate: altairBlock.Body.SyncAggregate,
|
||||
ExecutionPayload: &enginev1.ExecutionPayload{
|
||||
ParentHash: parentHash,
|
||||
FeeRecipient: feeRecipient,
|
||||
StateRoot: stateRoot,
|
||||
ReceiptsRoot: receiptsRoot,
|
||||
LogsBloom: logsBloom,
|
||||
PrevRandao: prevRandao,
|
||||
BlockNumber: blockNumber,
|
||||
GasLimit: gasLimit,
|
||||
GasUsed: gasUsed,
|
||||
Timestamp: timestamp,
|
||||
ExtraData: extraData,
|
||||
BaseFeePerGas: bytesutil.PadTo(bytesutil.BigIntToLittleEndianBytes(baseFeePerGas), 32),
|
||||
BlockHash: blockHash,
|
||||
Transactions: transactions,
|
||||
},
|
||||
},
|
||||
}, nil
|
||||
}
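
// Illustrative sketch, not part of the original file: the one non-obvious step above is the base
// fee. The JSON payload carries it as a base-10 decimal string, while the protobuf field expects
// 32 little-endian bytes, hence the big.Int parse followed by bytesutil.BigIntToLittleEndianBytes
// and bytesutil.PadTo. A hand-rolled equivalent, for illustration only:
func exampleBaseFeeToLittleEndian32(baseFeeJson string) ([]byte, error) {
    baseFee, ok := new(big.Int).SetString(baseFeeJson, 10)
    if !ok {
        return nil, errors.Errorf("failed to parse base fee per gas `%s`", baseFeeJson)
    }
    bigEndian := baseFee.Bytes() // big.Int serializes big-endian with no leading zeros
    if len(bigEndian) > 32 {
        return nil, errors.Errorf("base fee `%s` does not fit in 32 bytes", baseFeeJson)
    }
    littleEndian := make([]byte, 32)
    for i, b := range bigEndian {
        littleEndian[len(bigEndian)-1-i] = b // reverse the byte order and zero-pad to 32 bytes
    }
    return littleEndian, nil
}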
|
||||
|
||||
// ConvertRESTCapellaBlockToProto converts a Capella JSON beacon block to its protobuf equivalent
|
||||
func (c beaconApiBeaconBlockConverter) ConvertRESTCapellaBlockToProto(block *structs.BeaconBlockCapella) (*ethpb.BeaconBlockCapella, error) {
|
||||
if block.Body == nil {
|
||||
return nil, errors.New("block body is nil")
|
||||
}
|
||||
|
||||
if block.Body.ExecutionPayload == nil {
|
||||
return nil, errors.New("execution payload is nil")
|
||||
}
|
||||
|
||||
// Call ConvertRESTBellatrixBlockToProto to set the bellatrix fields, because all the error
// handling and the heavy lifting has already been done there
|
||||
bellatrixBlock, err := c.ConvertRESTBellatrixBlockToProto(&structs.BeaconBlockBellatrix{
|
||||
Slot: block.Slot,
|
||||
ProposerIndex: block.ProposerIndex,
|
||||
ParentRoot: block.ParentRoot,
|
||||
StateRoot: block.StateRoot,
|
||||
Body: &structs.BeaconBlockBodyBellatrix{
|
||||
RandaoReveal: block.Body.RandaoReveal,
|
||||
Eth1Data: block.Body.Eth1Data,
|
||||
Graffiti: block.Body.Graffiti,
|
||||
ProposerSlashings: block.Body.ProposerSlashings,
|
||||
AttesterSlashings: block.Body.AttesterSlashings,
|
||||
Attestations: block.Body.Attestations,
|
||||
Deposits: block.Body.Deposits,
|
||||
VoluntaryExits: block.Body.VoluntaryExits,
|
||||
SyncAggregate: block.Body.SyncAggregate,
|
||||
ExecutionPayload: &structs.ExecutionPayload{
|
||||
ParentHash: block.Body.ExecutionPayload.ParentHash,
|
||||
FeeRecipient: block.Body.ExecutionPayload.FeeRecipient,
|
||||
StateRoot: block.Body.ExecutionPayload.StateRoot,
|
||||
ReceiptsRoot: block.Body.ExecutionPayload.ReceiptsRoot,
|
||||
LogsBloom: block.Body.ExecutionPayload.LogsBloom,
|
||||
PrevRandao: block.Body.ExecutionPayload.PrevRandao,
|
||||
BlockNumber: block.Body.ExecutionPayload.BlockNumber,
|
||||
GasLimit: block.Body.ExecutionPayload.GasLimit,
|
||||
GasUsed: block.Body.ExecutionPayload.GasUsed,
|
||||
Timestamp: block.Body.ExecutionPayload.Timestamp,
|
||||
ExtraData: block.Body.ExecutionPayload.ExtraData,
|
||||
BaseFeePerGas: block.Body.ExecutionPayload.BaseFeePerGas,
|
||||
BlockHash: block.Body.ExecutionPayload.BlockHash,
|
||||
Transactions: block.Body.ExecutionPayload.Transactions,
|
||||
},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get the bellatrix fields of the capella block")
|
||||
}
|
||||
|
||||
withdrawals, err := convertWithdrawalsToProto(block.Body.ExecutionPayload.Withdrawals)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get withdrawals")
|
||||
}
|
||||
|
||||
blsToExecutionChanges, err := convertBlsToExecutionChangesToProto(block.Body.BLSToExecutionChanges)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get bls to execution changes")
|
||||
}
|
||||
|
||||
return ðpb.BeaconBlockCapella{
|
||||
Slot: bellatrixBlock.Slot,
|
||||
ProposerIndex: bellatrixBlock.ProposerIndex,
|
||||
ParentRoot: bellatrixBlock.ParentRoot,
|
||||
StateRoot: bellatrixBlock.StateRoot,
|
||||
Body: ðpb.BeaconBlockBodyCapella{
|
||||
RandaoReveal: bellatrixBlock.Body.RandaoReveal,
|
||||
Eth1Data: bellatrixBlock.Body.Eth1Data,
|
||||
Graffiti: bellatrixBlock.Body.Graffiti,
|
||||
ProposerSlashings: bellatrixBlock.Body.ProposerSlashings,
|
||||
AttesterSlashings: bellatrixBlock.Body.AttesterSlashings,
|
||||
Attestations: bellatrixBlock.Body.Attestations,
|
||||
Deposits: bellatrixBlock.Body.Deposits,
|
||||
VoluntaryExits: bellatrixBlock.Body.VoluntaryExits,
|
||||
SyncAggregate: bellatrixBlock.Body.SyncAggregate,
|
||||
ExecutionPayload: &enginev1.ExecutionPayloadCapella{
|
||||
ParentHash: bellatrixBlock.Body.ExecutionPayload.ParentHash,
|
||||
FeeRecipient: bellatrixBlock.Body.ExecutionPayload.FeeRecipient,
|
||||
StateRoot: bellatrixBlock.Body.ExecutionPayload.StateRoot,
|
||||
ReceiptsRoot: bellatrixBlock.Body.ExecutionPayload.ReceiptsRoot,
|
||||
LogsBloom: bellatrixBlock.Body.ExecutionPayload.LogsBloom,
|
||||
PrevRandao: bellatrixBlock.Body.ExecutionPayload.PrevRandao,
|
||||
BlockNumber: bellatrixBlock.Body.ExecutionPayload.BlockNumber,
|
||||
GasLimit: bellatrixBlock.Body.ExecutionPayload.GasLimit,
|
||||
GasUsed: bellatrixBlock.Body.ExecutionPayload.GasUsed,
|
||||
Timestamp: bellatrixBlock.Body.ExecutionPayload.Timestamp,
|
||||
ExtraData: bellatrixBlock.Body.ExecutionPayload.ExtraData,
|
||||
BaseFeePerGas: bellatrixBlock.Body.ExecutionPayload.BaseFeePerGas,
|
||||
BlockHash: bellatrixBlock.Body.ExecutionPayload.BlockHash,
|
||||
Transactions: bellatrixBlock.Body.ExecutionPayload.Transactions,
|
||||
Withdrawals: withdrawals,
|
||||
},
|
||||
BlsToExecutionChanges: blsToExecutionChanges,
|
||||
},
|
||||
}, nil
|
||||
}
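
// Illustrative usage sketch, not part of the original file: the four converters form a chain,
// with each fork building the previous fork's block first and then layering on its new fields
// (sync aggregate, execution payload, withdrawals and BLS-to-execution changes). jsonBlock is
// assumed to be a fully populated block decoded from a beacon API response.
func exampleConvertCapella(jsonBlock *structs.BeaconBlockCapella) (*ethpb.BeaconBlockCapella, error) {
    converter := beaconApiBeaconBlockConverter{}
    protoBlock, err := converter.ConvertRESTCapellaBlockToProto(jsonBlock)
    if err != nil {
        // Any malformed field is reported with field-specific context, wrapped fork by fork.
        return nil, errors.Wrap(err, "failed to convert the capella block")
    }
    return protoBlock, nil
}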
|
||||
@@ -1,505 +0,0 @@
|
||||
package validator_api
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/beacon/validator_api/test_helpers"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
)
|
||||
|
||||
func TestGetBeaconBlockConverter_Phase0Valid(t *testing.T) {
|
||||
expectedBeaconBlock := test_helpers.GenerateProtoPhase0BeaconBlock()
|
||||
beaconBlockConverter := &beaconApiBeaconBlockConverter{}
|
||||
beaconBlock, err := beaconBlockConverter.ConvertRESTPhase0BlockToProto(test_helpers.GenerateJsonPhase0BeaconBlock())
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, expectedBeaconBlock, beaconBlock)
|
||||
}
|
||||
|
||||
func TestGetBeaconBlockConverter_Phase0Error(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
expectedErrorMessage string
|
||||
generateData func() *structs.BeaconBlock
|
||||
}{
|
||||
{
|
||||
name: "nil body",
|
||||
expectedErrorMessage: "block body is nil",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil eth1 data",
|
||||
expectedErrorMessage: "eth1 data is nil",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.Eth1Data = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad slot",
|
||||
expectedErrorMessage: "failed to parse slot `foo`",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Slot = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad proposer index",
|
||||
expectedErrorMessage: "failed to parse proposer index `bar`",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.ProposerIndex = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad parent root",
|
||||
expectedErrorMessage: "failed to decode parent root `foo`",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.ParentRoot = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad state root",
|
||||
expectedErrorMessage: "failed to decode state root `bar`",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.StateRoot = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad randao reveal",
|
||||
expectedErrorMessage: "failed to decode randao reveal `foo`",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.RandaoReveal = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad deposit root",
|
||||
expectedErrorMessage: "failed to decode deposit root `bar`",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.Eth1Data.DepositRoot = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad deposit count",
|
||||
expectedErrorMessage: "failed to parse deposit count `foo`",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.Eth1Data.DepositCount = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad block hash",
|
||||
expectedErrorMessage: "failed to decode block hash `bar`",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.Eth1Data.BlockHash = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad graffiti",
|
||||
expectedErrorMessage: "failed to decode graffiti `foo`",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.Graffiti = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad proposer slashings",
|
||||
expectedErrorMessage: "failed to get proposer slashings",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.ProposerSlashings[0] = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad attester slashings",
|
||||
expectedErrorMessage: "failed to get attester slashings",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.AttesterSlashings[0] = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad attestations",
|
||||
expectedErrorMessage: "failed to get attestations",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.Attestations[0] = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad deposits",
|
||||
expectedErrorMessage: "failed to get deposits",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.Deposits[0] = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad voluntary exits",
|
||||
expectedErrorMessage: "failed to get voluntary exits",
|
||||
generateData: func() *structs.BeaconBlock {
|
||||
beaconBlock := test_helpers.GenerateJsonPhase0BeaconBlock()
|
||||
beaconBlock.Body.VoluntaryExits[0] = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
beaconBlockJson := testCase.generateData()
|
||||
|
||||
beaconBlockConverter := &beaconApiBeaconBlockConverter{}
|
||||
_, err := beaconBlockConverter.ConvertRESTPhase0BlockToProto(beaconBlockJson)
|
||||
assert.ErrorContains(t, testCase.expectedErrorMessage, err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetBeaconBlockConverter_AltairValid(t *testing.T) {
|
||||
expectedBeaconBlock := test_helpers.GenerateProtoAltairBeaconBlock()
|
||||
beaconBlockConverter := &beaconApiBeaconBlockConverter{}
|
||||
beaconBlock, err := beaconBlockConverter.ConvertRESTAltairBlockToProto(test_helpers.GenerateJsonAltairBeaconBlock())
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, expectedBeaconBlock, beaconBlock)
|
||||
}
|
||||
|
||||
func TestGetBeaconBlockConverter_AltairError(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
expectedErrorMessage string
|
||||
generateData func() *structs.BeaconBlockAltair
|
||||
}{
|
||||
{
|
||||
name: "nil body",
|
||||
expectedErrorMessage: "block body is nil",
|
||||
generateData: func() *structs.BeaconBlockAltair {
|
||||
beaconBlock := test_helpers.GenerateJsonAltairBeaconBlock()
|
||||
beaconBlock.Body = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil sync aggregate",
|
||||
expectedErrorMessage: "sync aggregate is nil",
|
||||
generateData: func() *structs.BeaconBlockAltair {
|
||||
beaconBlock := test_helpers.GenerateJsonAltairBeaconBlock()
|
||||
beaconBlock.Body.SyncAggregate = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad phase0 fields",
|
||||
expectedErrorMessage: "failed to get the phase0 fields of the altair block",
|
||||
generateData: func() *structs.BeaconBlockAltair {
|
||||
beaconBlock := test_helpers.GenerateJsonAltairBeaconBlock()
|
||||
beaconBlock.Body.Eth1Data = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad sync committee bits",
|
||||
expectedErrorMessage: "failed to decode sync committee bits `foo`",
|
||||
generateData: func() *structs.BeaconBlockAltair {
|
||||
beaconBlock := test_helpers.GenerateJsonAltairBeaconBlock()
|
||||
beaconBlock.Body.SyncAggregate.SyncCommitteeBits = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad sync committee signature",
|
||||
expectedErrorMessage: "failed to decode sync committee signature `bar`",
|
||||
generateData: func() *structs.BeaconBlockAltair {
|
||||
beaconBlock := test_helpers.GenerateJsonAltairBeaconBlock()
|
||||
beaconBlock.Body.SyncAggregate.SyncCommitteeSignature = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
beaconBlockJson := testCase.generateData()
|
||||
|
||||
beaconBlockConverter := &beaconApiBeaconBlockConverter{}
|
||||
_, err := beaconBlockConverter.ConvertRESTAltairBlockToProto(beaconBlockJson)
|
||||
assert.ErrorContains(t, testCase.expectedErrorMessage, err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetBeaconBlockConverter_BellatrixValid(t *testing.T) {
|
||||
expectedBeaconBlock := test_helpers.GenerateProtoBellatrixBeaconBlock()
|
||||
beaconBlockConverter := &beaconApiBeaconBlockConverter{}
|
||||
beaconBlock, err := beaconBlockConverter.ConvertRESTBellatrixBlockToProto(test_helpers.GenerateJsonBellatrixBeaconBlock())
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, expectedBeaconBlock, beaconBlock)
|
||||
}
|
||||
|
||||
func TestGetBeaconBlockConverter_BellatrixError(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
expectedErrorMessage string
|
||||
generateData func() *structs.BeaconBlockBellatrix
|
||||
}{
|
||||
{
|
||||
name: "nil body",
|
||||
expectedErrorMessage: "block body is nil",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil execution payload",
|
||||
expectedErrorMessage: "execution payload is nil",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad altair fields",
|
||||
expectedErrorMessage: "failed to get the altair fields of the bellatrix block",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.Eth1Data = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad parent hash",
|
||||
expectedErrorMessage: "failed to decode execution payload parent hash `foo`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.ParentHash = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad fee recipient",
|
||||
expectedErrorMessage: "failed to decode execution payload fee recipient `bar`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.FeeRecipient = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad state root",
|
||||
expectedErrorMessage: "failed to decode execution payload state root `foo`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.StateRoot = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad receipts root",
|
||||
expectedErrorMessage: "failed to decode execution payload receipts root `bar`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.ReceiptsRoot = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad logs bloom",
|
||||
expectedErrorMessage: "failed to decode execution payload logs bloom `foo`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.LogsBloom = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad prev randao",
|
||||
expectedErrorMessage: "failed to decode execution payload prev randao `bar`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.PrevRandao = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad block number",
|
||||
expectedErrorMessage: "failed to parse execution payload block number `foo`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.BlockNumber = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad gas limit",
|
||||
expectedErrorMessage: "failed to parse execution payload gas limit `bar`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.GasLimit = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad gas used",
|
||||
expectedErrorMessage: "failed to parse execution payload gas used `foo`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.GasUsed = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad timestamp",
|
||||
expectedErrorMessage: "failed to parse execution payload timestamp `bar`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.Timestamp = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad extra data",
|
||||
expectedErrorMessage: "failed to decode execution payload extra data `foo`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.ExtraData = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad base fee per gas",
|
||||
expectedErrorMessage: "failed to parse execution payload base fee per gas `bar`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.BaseFeePerGas = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad block hash",
|
||||
expectedErrorMessage: "failed to decode execution payload block hash `foo`",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.BlockHash = "foo"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad transactions",
|
||||
expectedErrorMessage: "failed to get execution payload transactions",
|
||||
generateData: func() *structs.BeaconBlockBellatrix {
|
||||
beaconBlock := test_helpers.GenerateJsonBellatrixBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.Transactions[0] = "bar"
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
beaconBlockJson := testCase.generateData()
|
||||
|
||||
beaconBlockConverter := &beaconApiBeaconBlockConverter{}
|
||||
_, err := beaconBlockConverter.ConvertRESTBellatrixBlockToProto(beaconBlockJson)
|
||||
assert.ErrorContains(t, testCase.expectedErrorMessage, err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetBeaconBlockConverter_CapellaValid(t *testing.T) {
|
||||
expectedBeaconBlock := test_helpers.GenerateProtoCapellaBeaconBlock()
|
||||
beaconBlockConverter := &beaconApiBeaconBlockConverter{}
|
||||
beaconBlock, err := beaconBlockConverter.ConvertRESTCapellaBlockToProto(test_helpers.GenerateJsonCapellaBeaconBlock())
|
||||
require.NoError(t, err)
|
||||
assert.DeepEqual(t, expectedBeaconBlock, beaconBlock)
|
||||
}
|
||||
|
||||
func TestGetBeaconBlockConverter_CapellaError(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
expectedErrorMessage string
|
||||
generateData func() *structs.BeaconBlockCapella
|
||||
}{
|
||||
{
|
||||
name: "nil body",
|
||||
expectedErrorMessage: "block body is nil",
|
||||
generateData: func() *structs.BeaconBlockCapella {
|
||||
beaconBlock := test_helpers.GenerateJsonCapellaBeaconBlock()
|
||||
beaconBlock.Body = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil execution payload",
|
||||
expectedErrorMessage: "execution payload is nil",
|
||||
generateData: func() *structs.BeaconBlockCapella {
|
||||
beaconBlock := test_helpers.GenerateJsonCapellaBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad bellatrix fields",
|
||||
expectedErrorMessage: "failed to get the bellatrix fields of the capella block",
|
||||
generateData: func() *structs.BeaconBlockCapella {
|
||||
beaconBlock := test_helpers.GenerateJsonCapellaBeaconBlock()
|
||||
beaconBlock.Body.Eth1Data = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad withdrawals",
|
||||
expectedErrorMessage: "failed to get withdrawals",
|
||||
generateData: func() *structs.BeaconBlockCapella {
|
||||
beaconBlock := test_helpers.GenerateJsonCapellaBeaconBlock()
|
||||
beaconBlock.Body.ExecutionPayload.Withdrawals[0] = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad bls execution changes",
|
||||
expectedErrorMessage: "failed to get bls to execution changes",
|
||||
generateData: func() *structs.BeaconBlockCapella {
|
||||
beaconBlock := test_helpers.GenerateJsonCapellaBeaconBlock()
|
||||
beaconBlock.Body.BLSToExecutionChanges[0] = nil
|
||||
return beaconBlock
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
beaconBlockJson := testCase.generateData()
|
||||
|
||||
beaconBlockConverter := &beaconApiBeaconBlockConverter{}
|
||||
_, err := beaconBlockConverter.ConvertRESTCapellaBlockToProto(beaconBlockJson)
|
||||
assert.ErrorContains(t, testCase.expectedErrorMessage, err)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,227 +0,0 @@
|
||||
package validator_api

import (
    "strconv"

    "github.com/ethereum/go-ethereum/common/hexutil"
    "github.com/prysmaticlabs/prysm/v5/api/apiutil"
    "github.com/prysmaticlabs/prysm/v5/api/server/structs"
    enginev1 "github.com/prysmaticlabs/prysm/v5/proto/engine/v1"
    ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

func jsonifyTransactions(transactions [][]byte) []string {
    jsonTransactions := make([]string, len(transactions))
    for index, transaction := range transactions {
        jsonTransactions[index] = hexutil.Encode(transaction)
    }
    return jsonTransactions
}
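
// Illustrative sketch, not part of the original file: hexutil.Encode always produces a lowercase,
// 0x-prefixed string, so these jsonify helpers are the exact inverse of the hexutil.Decode calls
// in the block converters. A round trip therefore preserves the original bytes.
func exampleTransactionRoundTrip(transaction []byte) bool {
    encoded := hexutil.Encode(transaction)  // e.g. []byte{1, 2, 3} -> "0x010203"
    decoded, err := hexutil.Decode(encoded) // decoding what we just encoded cannot fail
    if err != nil {
        return false
    }
    return string(decoded) == string(transaction)
}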

func jsonifyBlsToExecutionChanges(blsToExecutionChanges []*ethpb.SignedBLSToExecutionChange) []*structs.SignedBLSToExecutionChange {
    jsonBlsToExecutionChanges := make([]*structs.SignedBLSToExecutionChange, len(blsToExecutionChanges))
    for index, signedBlsToExecutionChange := range blsToExecutionChanges {
        blsToExecutionChangeJson := &structs.BLSToExecutionChange{
            ValidatorIndex:     apiutil.Uint64ToString(signedBlsToExecutionChange.Message.ValidatorIndex),
            FromBLSPubkey:      hexutil.Encode(signedBlsToExecutionChange.Message.FromBlsPubkey),
            ToExecutionAddress: hexutil.Encode(signedBlsToExecutionChange.Message.ToExecutionAddress),
        }
        signedJson := &structs.SignedBLSToExecutionChange{
            Message:   blsToExecutionChangeJson,
            Signature: hexutil.Encode(signedBlsToExecutionChange.Signature),
        }
        jsonBlsToExecutionChanges[index] = signedJson
    }
    return jsonBlsToExecutionChanges
}

func jsonifyEth1Data(eth1Data *ethpb.Eth1Data) *structs.Eth1Data {
    return &structs.Eth1Data{
        BlockHash:    hexutil.Encode(eth1Data.BlockHash),
        DepositCount: apiutil.Uint64ToString(eth1Data.DepositCount),
        DepositRoot:  hexutil.Encode(eth1Data.DepositRoot),
    }
}

func jsonifyAttestations(attestations []*ethpb.Attestation) []*structs.Attestation {
    jsonAttestations := make([]*structs.Attestation, len(attestations))
    for index, attestation := range attestations {
        jsonAttestations[index] = jsonifyAttestation(attestation)
    }
    return jsonAttestations
}

func jsonifySingleAttestations(attestations []*ethpb.SingleAttestation) []*structs.SingleAttestation {
    jsonAttestations := make([]*structs.SingleAttestation, len(attestations))
    for index, attestation := range attestations {
        jsonAttestations[index] = jsonifySingleAttestation(attestation)
    }
    return jsonAttestations
}

func jsonifyAttesterSlashings(attesterSlashings []*ethpb.AttesterSlashing) []*structs.AttesterSlashing {
    jsonAttesterSlashings := make([]*structs.AttesterSlashing, len(attesterSlashings))
    for index, attesterSlashing := range attesterSlashings {
        jsonAttesterSlashing := &structs.AttesterSlashing{
            Attestation1: jsonifyIndexedAttestation(attesterSlashing.Attestation_1),
            Attestation2: jsonifyIndexedAttestation(attesterSlashing.Attestation_2),
        }
        jsonAttesterSlashings[index] = jsonAttesterSlashing
    }
    return jsonAttesterSlashings
}

func jsonifyDeposits(deposits []*ethpb.Deposit) []*structs.Deposit {
    jsonDeposits := make([]*structs.Deposit, len(deposits))
    for depositIndex, deposit := range deposits {
        proofs := make([]string, len(deposit.Proof))
        for proofIndex, proof := range deposit.Proof {
            proofs[proofIndex] = hexutil.Encode(proof)
        }

        jsonDeposit := &structs.Deposit{
            Data: &structs.DepositData{
                Amount:                apiutil.Uint64ToString(deposit.Data.Amount),
                Pubkey:                hexutil.Encode(deposit.Data.PublicKey),
                Signature:             hexutil.Encode(deposit.Data.Signature),
                WithdrawalCredentials: hexutil.Encode(deposit.Data.WithdrawalCredentials),
            },
            Proof: proofs,
        }
        jsonDeposits[depositIndex] = jsonDeposit
    }
    return jsonDeposits
}

func jsonifyProposerSlashings(proposerSlashings []*ethpb.ProposerSlashing) []*structs.ProposerSlashing {
    jsonProposerSlashings := make([]*structs.ProposerSlashing, len(proposerSlashings))
    for index, proposerSlashing := range proposerSlashings {
        jsonProposerSlashing := &structs.ProposerSlashing{
            SignedHeader1: jsonifySignedBeaconBlockHeader(proposerSlashing.Header_1),
            SignedHeader2: jsonifySignedBeaconBlockHeader(proposerSlashing.Header_2),
        }
        jsonProposerSlashings[index] = jsonProposerSlashing
    }
    return jsonProposerSlashings
}

// JsonifySignedVoluntaryExits converts an array of voluntary exit structs to a JSON hex string compatible format.
func JsonifySignedVoluntaryExits(voluntaryExits []*ethpb.SignedVoluntaryExit) []*structs.SignedVoluntaryExit {
    jsonSignedVoluntaryExits := make([]*structs.SignedVoluntaryExit, len(voluntaryExits))
    for index, signedVoluntaryExit := range voluntaryExits {
        jsonSignedVoluntaryExit := &structs.SignedVoluntaryExit{
            Message: &structs.VoluntaryExit{
                Epoch:          apiutil.Uint64ToString(signedVoluntaryExit.Exit.Epoch),
                ValidatorIndex: apiutil.Uint64ToString(signedVoluntaryExit.Exit.ValidatorIndex),
            },
            Signature: hexutil.Encode(signedVoluntaryExit.Signature),
        }
        jsonSignedVoluntaryExits[index] = jsonSignedVoluntaryExit
    }
    return jsonSignedVoluntaryExits
}
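
// Illustrative sketch, not part of the original file: JsonifySignedVoluntaryExits is exported, so
// other packages can reuse it to build the JSON shape of an exit before submitting it over the
// beacon API. The epoch, index and zeroed 96-byte signature below are made-up values.
func exampleJsonifyOneExit() *structs.SignedVoluntaryExit {
    exits := []*ethpb.SignedVoluntaryExit{
        {
            Exit:      &ethpb.VoluntaryExit{Epoch: 100, ValidatorIndex: 7},
            Signature: make([]byte, 96), // BLS signatures are 96 bytes
        },
    }
    // Numeric fields become decimal strings ("100", "7"); the signature becomes a 0x-hex string.
    return JsonifySignedVoluntaryExits(exits)[0]
}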

func jsonifySignedBeaconBlockHeader(signedBeaconBlockHeader *ethpb.SignedBeaconBlockHeader) *structs.SignedBeaconBlockHeader {
    return &structs.SignedBeaconBlockHeader{
        Message: &structs.BeaconBlockHeader{
            BodyRoot:      hexutil.Encode(signedBeaconBlockHeader.Header.BodyRoot),
            ParentRoot:    hexutil.Encode(signedBeaconBlockHeader.Header.ParentRoot),
            ProposerIndex: apiutil.Uint64ToString(signedBeaconBlockHeader.Header.ProposerIndex),
            Slot:          apiutil.Uint64ToString(signedBeaconBlockHeader.Header.Slot),
            StateRoot:     hexutil.Encode(signedBeaconBlockHeader.Header.StateRoot),
        },
        Signature: hexutil.Encode(signedBeaconBlockHeader.Signature),
    }
}

func jsonifyIndexedAttestation(indexedAttestation *ethpb.IndexedAttestation) *structs.IndexedAttestation {
    attestingIndices := make([]string, len(indexedAttestation.AttestingIndices))
    for index, attestingIndex := range indexedAttestation.AttestingIndices {
        attestingIndices[index] = apiutil.Uint64ToString(attestingIndex)
    }

    return &structs.IndexedAttestation{
        AttestingIndices: attestingIndices,
        Data:             jsonifyAttestationData(indexedAttestation.Data),
        Signature:        hexutil.Encode(indexedAttestation.Signature),
    }
}

func jsonifyAttestationData(attestationData *ethpb.AttestationData) *structs.AttestationData {
    return &structs.AttestationData{
        BeaconBlockRoot: hexutil.Encode(attestationData.BeaconBlockRoot),
        CommitteeIndex:  apiutil.Uint64ToString(attestationData.CommitteeIndex),
        Slot:            apiutil.Uint64ToString(attestationData.Slot),
        Source: &structs.Checkpoint{
            Epoch: apiutil.Uint64ToString(attestationData.Source.Epoch),
            Root:  hexutil.Encode(attestationData.Source.Root),
        },
        Target: &structs.Checkpoint{
            Epoch: apiutil.Uint64ToString(attestationData.Target.Epoch),
            Root:  hexutil.Encode(attestationData.Target.Root),
        },
    }
}

func jsonifyAttestation(attestation *ethpb.Attestation) *structs.Attestation {
    return &structs.Attestation{
        AggregationBits: hexutil.Encode(attestation.AggregationBits),
        Data:            jsonifyAttestationData(attestation.Data),
        Signature:       hexutil.Encode(attestation.Signature),
    }
}

func jsonifyAttestationElectra(attestation *ethpb.AttestationElectra) *structs.AttestationElectra {
    return &structs.AttestationElectra{
        AggregationBits: hexutil.Encode(attestation.AggregationBits),
        Data:            jsonifyAttestationData(attestation.Data),
        Signature:       hexutil.Encode(attestation.Signature),
        CommitteeBits:   hexutil.Encode(attestation.CommitteeBits),
    }
}

func jsonifySingleAttestation(attestation *ethpb.SingleAttestation) *structs.SingleAttestation {
    return &structs.SingleAttestation{
        CommitteeIndex: apiutil.Uint64ToString(attestation.CommitteeId),
        AttesterIndex:  apiutil.Uint64ToString(attestation.AttesterIndex),
        Data:           jsonifyAttestationData(attestation.Data),
        Signature:      hexutil.Encode(attestation.Signature),
    }
}

func jsonifySignedAggregateAndProof(signedAggregateAndProof *ethpb.SignedAggregateAttestationAndProof) *structs.SignedAggregateAttestationAndProof {
    return &structs.SignedAggregateAttestationAndProof{
        Message: &structs.AggregateAttestationAndProof{
            AggregatorIndex: apiutil.Uint64ToString(signedAggregateAndProof.Message.AggregatorIndex),
            Aggregate:       jsonifyAttestation(signedAggregateAndProof.Message.Aggregate),
            SelectionProof:  hexutil.Encode(signedAggregateAndProof.Message.SelectionProof),
        },
        Signature: hexutil.Encode(signedAggregateAndProof.Signature),
    }
}

func jsonifySignedAggregateAndProofElectra(signedAggregateAndProof *ethpb.SignedAggregateAttestationAndProofElectra) *structs.SignedAggregateAttestationAndProofElectra {
    return &structs.SignedAggregateAttestationAndProofElectra{
        Message: &structs.AggregateAttestationAndProofElectra{
            AggregatorIndex: apiutil.Uint64ToString(signedAggregateAndProof.Message.AggregatorIndex),
            Aggregate:       jsonifyAttestationElectra(signedAggregateAndProof.Message.Aggregate),
            SelectionProof:  hexutil.Encode(signedAggregateAndProof.Message.SelectionProof),
        },
        Signature: hexutil.Encode(signedAggregateAndProof.Signature),
    }
}

func jsonifyWithdrawals(withdrawals []*enginev1.Withdrawal) []*structs.Withdrawal {
    jsonWithdrawals := make([]*structs.Withdrawal, len(withdrawals))
    for index, withdrawal := range withdrawals {
        jsonWithdrawals[index] = &structs.Withdrawal{
            WithdrawalIndex:  strconv.FormatUint(withdrawal.Index, 10),
            ValidatorIndex:   strconv.FormatUint(uint64(withdrawal.ValidatorIndex), 10),
            ExecutionAddress: hexutil.Encode(withdrawal.Address),
            Amount:           strconv.FormatUint(withdrawal.Amount, 10),
        }
    }
    return jsonWithdrawals
}
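
// Illustrative sketch, not part of the original file: every helper in this file follows the same
// two conventions from the beacon API JSON spec, i.e. byte slices become 0x-prefixed hex via
// hexutil.Encode and unsigned integers become base-10 decimal strings. The withdrawal values
// below are made up.
func exampleWithdrawalEncoding() (indexJson string, addressJson string) {
    var withdrawalIndex uint64 = 5
    address := make([]byte, 20) // execution addresses are 20 bytes
    return strconv.FormatUint(withdrawalIndex, 10), hexutil.Encode(address)
}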
|
||||
@@ -1,648 +0,0 @@
|
||||
package validator_api
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
enginev1 "github.com/prysmaticlabs/prysm/v5/proto/engine/v1"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
)
|
||||
|
||||
func TestBeaconBlockJsonHelpers_JsonifyTransactions(t *testing.T) {
|
||||
input := [][]byte{{1}, {2}, {3}, {4}}
|
||||
|
||||
expectedResult := []string{
|
||||
hexutil.Encode([]byte{1}),
|
||||
hexutil.Encode([]byte{2}),
|
||||
hexutil.Encode([]byte{3}),
|
||||
hexutil.Encode([]byte{4}),
|
||||
}
|
||||
|
||||
result := jsonifyTransactions(input)
|
||||
assert.DeepEqual(t, expectedResult, result)
|
||||
}
|
||||
|
||||
func TestBeaconBlockJsonHelpers_JsonifyBlsToExecutionChanges(t *testing.T) {
|
||||
input := []*ethpb.SignedBLSToExecutionChange{
|
||||
{
|
||||
Message: ðpb.BLSToExecutionChange{
|
||||
ValidatorIndex: 1,
|
||||
FromBlsPubkey: []byte{2},
|
||||
ToExecutionAddress: []byte{3},
|
||||
},
|
||||
Signature: []byte{7},
|
||||
},
|
||||
{
|
||||
Message: ðpb.BLSToExecutionChange{
|
||||
ValidatorIndex: 4,
|
||||
FromBlsPubkey: []byte{5},
|
||||
ToExecutionAddress: []byte{6},
|
||||
},
|
||||
Signature: []byte{8},
|
||||
},
|
||||
}
|
||||
|
||||
expectedResult := []*structs.SignedBLSToExecutionChange{
|
||||
{
|
||||
Message: &structs.BLSToExecutionChange{
|
||||
ValidatorIndex: "1",
|
||||
FromBLSPubkey: hexutil.Encode([]byte{2}),
|
||||
ToExecutionAddress: hexutil.Encode([]byte{3}),
|
||||
},
|
||||
Signature: hexutil.Encode([]byte{7}),
|
||||
},
|
||||
{
|
||||
Message: &structs.BLSToExecutionChange{
|
||||
ValidatorIndex: "4",
|
||||
FromBLSPubkey: hexutil.Encode([]byte{5}),
|
||||
ToExecutionAddress: hexutil.Encode([]byte{6}),
|
||||
},
|
||||
Signature: hexutil.Encode([]byte{8}),
|
||||
},
|
||||
}
|
||||
|
||||
assert.DeepEqual(t, expectedResult, structs.SignedBLSChangesFromConsensus(input))
|
||||
}
|
||||
|
||||
func TestBeaconBlockJsonHelpers_JsonifyEth1Data(t *testing.T) {
|
||||
input := ðpb.Eth1Data{
|
||||
DepositRoot: []byte{1},
|
||||
DepositCount: 2,
|
||||
BlockHash: []byte{3},
|
||||
}
|
||||
|
||||
expectedResult := &structs.Eth1Data{
|
||||
DepositRoot: hexutil.Encode([]byte{1}),
|
||||
DepositCount: "2",
|
||||
BlockHash: hexutil.Encode([]byte{3}),
|
||||
}
|
||||
|
||||
result := jsonifyEth1Data(input)
|
||||
assert.DeepEqual(t, expectedResult, result)
|
||||
}
|
||||
|
||||
func TestBeaconBlockJsonHelpers_JsonifyAttestations(t *testing.T) {
|
||||
input := []*ethpb.Attestation{
|
||||
{
|
||||
AggregationBits: []byte{1},
|
||||
Data: ðpb.AttestationData{
|
||||
Slot: 2,
|
||||
CommitteeIndex: 3,
|
||||
BeaconBlockRoot: []byte{4},
|
||||
Source: ðpb.Checkpoint{
|
||||
Epoch: 5,
|
||||
Root: []byte{6},
|
||||
},
|
||||
Target: ðpb.Checkpoint{
|
||||
Epoch: 7,
|
||||
Root: []byte{8},
|
||||
},
|
||||
},
|
||||
Signature: []byte{9},
|
||||
},
|
||||
{
|
||||
AggregationBits: []byte{10},
|
||||
Data: ðpb.AttestationData{
|
||||
Slot: 11,
|
||||
CommitteeIndex: 12,
|
||||
BeaconBlockRoot: []byte{13},
|
||||
Source: ðpb.Checkpoint{
|
||||
Epoch: 14,
|
||||
Root: []byte{15},
|
||||
},
|
||||
Target: ðpb.Checkpoint{
|
||||
Epoch: 16,
|
||||
Root: []byte{17},
|
||||
},
|
||||
},
|
||||
Signature: []byte{18},
|
||||
},
|
||||
}
|
||||
|
||||
expectedResult := []*structs.Attestation{
|
||||
{
|
||||
AggregationBits: hexutil.Encode([]byte{1}),
|
||||
Data: &structs.AttestationData{
|
||||
Slot: "2",
|
||||
CommitteeIndex: "3",
|
||||
BeaconBlockRoot: hexutil.Encode([]byte{4}),
|
||||
Source: &structs.Checkpoint{
|
||||
Epoch: "5",
|
||||
Root: hexutil.Encode([]byte{6}),
|
||||
},
|
||||
Target: &structs.Checkpoint{
|
||||
Epoch: "7",
|
||||
Root: hexutil.Encode([]byte{8}),
|
||||
},
|
||||
},
|
||||
Signature: hexutil.Encode([]byte{9}),
|
||||
},
|
||||
{
|
||||
AggregationBits: hexutil.Encode([]byte{10}),
|
||||
Data: &structs.AttestationData{
|
||||
Slot: "11",
|
||||
CommitteeIndex: "12",
|
||||
BeaconBlockRoot: hexutil.Encode([]byte{13}),
|
||||
Source: &structs.Checkpoint{
|
||||
Epoch: "14",
|
||||
Root: hexutil.Encode([]byte{15}),
|
||||
},
|
||||
Target: &structs.Checkpoint{
|
||||
Epoch: "16",
|
||||
Root: hexutil.Encode([]byte{17}),
|
||||
},
|
||||
},
|
||||
Signature: hexutil.Encode([]byte{18}),
|
||||
},
|
||||
}
|
||||
|
||||
result := jsonifyAttestations(input)
|
||||
assert.DeepEqual(t, expectedResult, result)
|
||||
}
|
||||
|
||||
func TestBeaconBlockJsonHelpers_JsonifyAttesterSlashings(t *testing.T) {
|
||||
input := []*ethpb.AttesterSlashing{
|
||||
{
|
||||
Attestation_1: ðpb.IndexedAttestation{
|
||||
AttestingIndices: []uint64{1, 2},
|
||||
Data: ðpb.AttestationData{
|
||||
Slot: 3,
|
||||
CommitteeIndex: 4,
|
||||
BeaconBlockRoot: []byte{5},
|
||||
Source: ðpb.Checkpoint{
|
||||
Epoch: 6,
|
||||
Root: []byte{7},
|
||||
},
|
||||
Target: ðpb.Checkpoint{
|
||||
Epoch: 8,
|
||||
Root: []byte{9},
|
||||
},
|
||||
},
|
||||
Signature: []byte{10},
|
||||
},
|
||||
Attestation_2: ðpb.IndexedAttestation{
|
||||
AttestingIndices: []uint64{11, 12},
|
||||
Data: ðpb.AttestationData{
|
||||
Slot: 13,
|
||||
CommitteeIndex: 14,
|
||||
BeaconBlockRoot: []byte{15},
|
||||
Source: ðpb.Checkpoint{
|
||||
Epoch: 16,
|
||||
Root: []byte{17},
|
||||
},
|
||||
Target: ðpb.Checkpoint{
|
||||
Epoch: 18,
|
||||
Root: []byte{19},
|
||||
},
|
||||
},
|
||||
Signature: []byte{20},
|
||||
},
|
||||
},
|
||||
{
|
||||
Attestation_1: ðpb.IndexedAttestation{
|
||||
AttestingIndices: []uint64{21, 22},
|
||||
Data: ðpb.AttestationData{
|
||||
Slot: 23,
|
||||
CommitteeIndex: 24,
|
||||
BeaconBlockRoot: []byte{25},
|
||||
Source: ðpb.Checkpoint{
|
||||
Epoch: 26,
|
||||
Root: []byte{27},
|
||||
},
|
||||
Target: ðpb.Checkpoint{
|
||||
Epoch: 28,
|
||||
Root: []byte{29},
|
||||
},
|
||||
},
|
||||
Signature: []byte{30},
|
||||
},
|
||||
Attestation_2: ðpb.IndexedAttestation{
|
||||
AttestingIndices: []uint64{31, 32},
|
||||
Data: ðpb.AttestationData{
|
||||
Slot: 33,
|
||||
CommitteeIndex: 34,
|
||||
BeaconBlockRoot: []byte{35},
|
||||
Source: ðpb.Checkpoint{
|
||||
Epoch: 36,
|
||||
Root: []byte{37},
|
||||
},
|
||||
Target: ðpb.Checkpoint{
|
||||
Epoch: 38,
|
||||
Root: []byte{39},
|
||||
},
|
||||
},
|
||||
Signature: []byte{40},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
expectedResult := []*structs.AttesterSlashing{
|
||||
{
|
||||
Attestation1: &structs.IndexedAttestation{
|
||||
AttestingIndices: []string{"1", "2"},
|
||||
Data: &structs.AttestationData{
|
||||
Slot: "3",
|
||||
CommitteeIndex: "4",
|
||||
BeaconBlockRoot: hexutil.Encode([]byte{5}),
|
||||
Source: &structs.Checkpoint{
|
||||
Epoch: "6",
|
||||
Root: hexutil.Encode([]byte{7}),
|
||||
},
|
||||
Target: &structs.Checkpoint{
|
||||
Epoch: "8",
|
||||
Root: hexutil.Encode([]byte{9}),
|
||||
},
|
||||
},
|
||||
Signature: hexutil.Encode([]byte{10}),
|
||||
},
|
||||
Attestation2: &structs.IndexedAttestation{
|
||||
AttestingIndices: []string{"11", "12"},
|
||||
Data: &structs.AttestationData{
|
||||
Slot: "13",
|
||||
CommitteeIndex: "14",
|
||||
BeaconBlockRoot: hexutil.Encode([]byte{15}),
|
||||
Source: &structs.Checkpoint{
|
||||
Epoch: "16",
|
||||
Root: hexutil.Encode([]byte{17}),
|
||||
},
|
||||
Target: &structs.Checkpoint{
|
||||
Epoch: "18",
|
||||
Root: hexutil.Encode([]byte{19}),
|
||||
},
|
||||
},
|
||||
Signature: hexutil.Encode([]byte{20}),
|
||||
},
|
||||
},
|
||||
{
|
||||
Attestation1: &structs.IndexedAttestation{
|
||||
AttestingIndices: []string{"21", "22"},
|
||||
Data: &structs.AttestationData{
|
||||
Slot: "23",
|
||||
CommitteeIndex: "24",
|
||||
BeaconBlockRoot: hexutil.Encode([]byte{25}),
|
||||
Source: &structs.Checkpoint{
|
||||
Epoch: "26",
|
||||
Root: hexutil.Encode([]byte{27}),
|
||||
},
|
||||
Target: &structs.Checkpoint{
|
||||
Epoch: "28",
|
||||
Root: hexutil.Encode([]byte{29}),
|
||||
},
|
||||
},
|
||||
Signature: hexutil.Encode([]byte{30}),
|
||||
},
|
||||
Attestation2: &structs.IndexedAttestation{
|
||||
AttestingIndices: []string{"31", "32"},
|
||||
Data: &structs.AttestationData{
|
||||
Slot: "33",
|
||||
CommitteeIndex: "34",
|
||||
BeaconBlockRoot: hexutil.Encode([]byte{35}),
|
||||
Source: &structs.Checkpoint{
|
||||
Epoch: "36",
|
||||
Root: hexutil.Encode([]byte{37}),
|
||||
},
|
||||
Target: &structs.Checkpoint{
|
||||
Epoch: "38",
|
||||
Root: hexutil.Encode([]byte{39}),
|
||||
},
|
||||
},
|
||||
Signature: hexutil.Encode([]byte{40}),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
result := jsonifyAttesterSlashings(input)
|
||||
assert.DeepEqual(t, expectedResult, result)
|
||||
}
|
||||
|
||||
func TestBeaconBlockJsonHelpers_JsonifyDeposits(t *testing.T) {
|
||||
input := []*ethpb.Deposit{
|
||||
{
|
||||
Proof: [][]byte{{1}, {2}},
|
||||
Data: ðpb.Deposit_Data{
|
||||
PublicKey: []byte{3},
|
||||
WithdrawalCredentials: []byte{4},
|
||||
Amount: 5,
|
||||
Signature: []byte{6},
|
||||
},
|
||||
},
|
||||
{
|
||||
Proof: [][]byte{
|
||||
{7},
|
||||
{8},
|
||||
},
|
||||
Data: ðpb.Deposit_Data{
|
||||
PublicKey: []byte{9},
|
||||
WithdrawalCredentials: []byte{10},
|
||||
Amount: 11,
|
||||
Signature: []byte{12},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
expectedResult := []*structs.Deposit{
|
||||
{
|
||||
Proof: []string{
|
||||
hexutil.Encode([]byte{1}),
|
||||
hexutil.Encode([]byte{2}),
|
||||
},
|
||||
Data: &structs.DepositData{
|
||||
Pubkey: hexutil.Encode([]byte{3}),
|
||||
WithdrawalCredentials: hexutil.Encode([]byte{4}),
|
||||
Amount: "5",
|
||||
Signature: hexutil.Encode([]byte{6}),
|
||||
},
|
||||
},
|
||||
{
|
||||
Proof: []string{
|
||||
hexutil.Encode([]byte{7}),
|
||||
hexutil.Encode([]byte{8}),
|
||||
},
|
||||
Data: &structs.DepositData{
|
||||
Pubkey: hexutil.Encode([]byte{9}),
|
||||
WithdrawalCredentials: hexutil.Encode([]byte{10}),
|
||||
Amount: "11",
|
||||
Signature: hexutil.Encode([]byte{12}),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
result := jsonifyDeposits(input)
|
||||
assert.DeepEqual(t, expectedResult, result)
|
||||
}
|
||||
|
||||
func TestBeaconBlockJsonHelpers_JsonifyProposerSlashings(t *testing.T) {
|
||||
input := []*ethpb.ProposerSlashing{
|
||||
{
|
||||
Header_1: ðpb.SignedBeaconBlockHeader{
|
||||
Header: ðpb.BeaconBlockHeader{
|
||||
Slot: 1,
|
||||
ProposerIndex: 2,
|
||||
ParentRoot: []byte{3},
|
||||
StateRoot: []byte{4},
|
||||
BodyRoot: []byte{5},
|
||||
},
|
||||
Signature: []byte{6},
|
||||
},
|
||||
Header_2: ðpb.SignedBeaconBlockHeader{
|
||||
Header: ðpb.BeaconBlockHeader{
|
||||
Slot: 7,
|
||||
ProposerIndex: 8,
|
||||
ParentRoot: []byte{9},
|
||||
StateRoot: []byte{10},
|
||||
BodyRoot: []byte{11},
|
||||
},
|
||||
Signature: []byte{12},
|
||||
},
|
||||
},
|
||||
{
|
||||
Header_1: ðpb.SignedBeaconBlockHeader{
|
||||
Header: ðpb.BeaconBlockHeader{
|
||||
Slot: 13,
|
||||
ProposerIndex: 14,
|
||||
ParentRoot: []byte{15},
|
||||
StateRoot: []byte{16},
|
||||
BodyRoot: []byte{17},
|
||||
},
|
||||
Signature: []byte{18},
|
||||
},
|
||||
Header_2: ðpb.SignedBeaconBlockHeader{
|
||||
Header: ðpb.BeaconBlockHeader{
|
||||
Slot: 19,
|
||||
ProposerIndex: 20,
|
||||
ParentRoot: []byte{21},
|
||||
StateRoot: []byte{22},
|
||||
BodyRoot: []byte{23},
|
||||
},
|
||||
Signature: []byte{24},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
expectedResult := []*structs.ProposerSlashing{
|
||||
{
|
||||
SignedHeader1: &structs.SignedBeaconBlockHeader{
|
||||
Message: &structs.BeaconBlockHeader{
|
||||
Slot: "1",
|
||||
ProposerIndex: "2",
|
					ParentRoot: hexutil.Encode([]byte{3}),
					StateRoot: hexutil.Encode([]byte{4}),
					BodyRoot: hexutil.Encode([]byte{5}),
				},
				Signature: hexutil.Encode([]byte{6}),
			},
			SignedHeader2: &structs.SignedBeaconBlockHeader{
				Message: &structs.BeaconBlockHeader{
					Slot: "7",
					ProposerIndex: "8",
					ParentRoot: hexutil.Encode([]byte{9}),
					StateRoot: hexutil.Encode([]byte{10}),
					BodyRoot: hexutil.Encode([]byte{11}),
				},
				Signature: hexutil.Encode([]byte{12}),
			},
		},
		{
			SignedHeader1: &structs.SignedBeaconBlockHeader{
				Message: &structs.BeaconBlockHeader{
					Slot: "13",
					ProposerIndex: "14",
					ParentRoot: hexutil.Encode([]byte{15}),
					StateRoot: hexutil.Encode([]byte{16}),
					BodyRoot: hexutil.Encode([]byte{17}),
				},
				Signature: hexutil.Encode([]byte{18}),
			},
			SignedHeader2: &structs.SignedBeaconBlockHeader{
				Message: &structs.BeaconBlockHeader{
					Slot: "19",
					ProposerIndex: "20",
					ParentRoot: hexutil.Encode([]byte{21}),
					StateRoot: hexutil.Encode([]byte{22}),
					BodyRoot: hexutil.Encode([]byte{23}),
				},
				Signature: hexutil.Encode([]byte{24}),
			},
		},
	}

	result := jsonifyProposerSlashings(input)
	assert.DeepEqual(t, expectedResult, result)
}

func TestBeaconBlockJsonHelpers_JsonifySignedVoluntaryExits(t *testing.T) {
	input := []*ethpb.SignedVoluntaryExit{
		{
			Exit: &ethpb.VoluntaryExit{
				Epoch: 1,
				ValidatorIndex: 2,
			},
			Signature: []byte{3},
		},
		{
			Exit: &ethpb.VoluntaryExit{
				Epoch: 4,
				ValidatorIndex: 5,
			},
			Signature: []byte{6},
		},
	}

	expectedResult := []*structs.SignedVoluntaryExit{
		{
			Message: &structs.VoluntaryExit{
				Epoch: "1",
				ValidatorIndex: "2",
			},
			Signature: hexutil.Encode([]byte{3}),
		},
		{
			Message: &structs.VoluntaryExit{
				Epoch: "4",
				ValidatorIndex: "5",
			},
			Signature: hexutil.Encode([]byte{6}),
		},
	}

	result := JsonifySignedVoluntaryExits(input)
	assert.DeepEqual(t, expectedResult, result)
}

func TestBeaconBlockJsonHelpers_JsonifySignedBeaconBlockHeader(t *testing.T) {
	input := &ethpb.SignedBeaconBlockHeader{
		Header: &ethpb.BeaconBlockHeader{
			Slot: 1,
			ProposerIndex: 2,
			ParentRoot: []byte{3},
			StateRoot: []byte{4},
			BodyRoot: []byte{5},
		},
		Signature: []byte{6},
	}

	expectedResult := &structs.SignedBeaconBlockHeader{
		Message: &structs.BeaconBlockHeader{
			Slot: "1",
			ProposerIndex: "2",
			ParentRoot: hexutil.Encode([]byte{3}),
			StateRoot: hexutil.Encode([]byte{4}),
			BodyRoot: hexutil.Encode([]byte{5}),
		},
		Signature: hexutil.Encode([]byte{6}),
	}

	result := jsonifySignedBeaconBlockHeader(input)
	assert.DeepEqual(t, expectedResult, result)
}

func TestBeaconBlockJsonHelpers_JsonifyIndexedAttestation(t *testing.T) {
	input := &ethpb.IndexedAttestation{
		AttestingIndices: []uint64{1, 2},
		Data: &ethpb.AttestationData{
			Slot: 3,
			CommitteeIndex: 4,
			BeaconBlockRoot: []byte{5},
			Source: &ethpb.Checkpoint{
				Epoch: 6,
				Root: []byte{7},
			},
			Target: &ethpb.Checkpoint{
				Epoch: 8,
				Root: []byte{9},
			},
		},
		Signature: []byte{10},
	}

	expectedResult := &structs.IndexedAttestation{
		AttestingIndices: []string{"1", "2"},
		Data: &structs.AttestationData{
			Slot: "3",
			CommitteeIndex: "4",
			BeaconBlockRoot: hexutil.Encode([]byte{5}),
			Source: &structs.Checkpoint{
				Epoch: "6",
				Root: hexutil.Encode([]byte{7}),
			},
			Target: &structs.Checkpoint{
				Epoch: "8",
				Root: hexutil.Encode([]byte{9}),
			},
		},
		Signature: hexutil.Encode([]byte{10}),
	}

	result := jsonifyIndexedAttestation(input)
	assert.DeepEqual(t, expectedResult, result)
}

func TestBeaconBlockJsonHelpers_JsonifyAttestationData(t *testing.T) {
	input := &ethpb.AttestationData{
		Slot: 1,
		CommitteeIndex: 2,
		BeaconBlockRoot: []byte{3},
		Source: &ethpb.Checkpoint{
			Epoch: 4,
			Root: []byte{5},
		},
		Target: &ethpb.Checkpoint{
			Epoch: 6,
			Root: []byte{7},
		},
	}

	expectedResult := &structs.AttestationData{
		Slot: "1",
		CommitteeIndex: "2",
		BeaconBlockRoot: hexutil.Encode([]byte{3}),
		Source: &structs.Checkpoint{
			Epoch: "4",
			Root: hexutil.Encode([]byte{5}),
		},
		Target: &structs.Checkpoint{
			Epoch: "6",
			Root: hexutil.Encode([]byte{7}),
		},
	}

	result := jsonifyAttestationData(input)
	assert.DeepEqual(t, expectedResult, result)
}

func TestBeaconBlockJsonHelpers_JsonifyWithdrawals(t *testing.T) {
	input := []*enginev1.Withdrawal{
		{
			Index: 1,
			ValidatorIndex: 2,
			Address: []byte{3},
			Amount: 4,
		},
		{
			Index: 5,
			ValidatorIndex: 6,
			Address: []byte{7},
			Amount: 8,
		},
	}

	expectedResult := []*structs.Withdrawal{
		{
			WithdrawalIndex: "1",
			ValidatorIndex: "2",
			ExecutionAddress: hexutil.Encode([]byte{3}),
			Amount: "4",
		},
		{
			WithdrawalIndex: "5",
			ValidatorIndex: "6",
			ExecutionAddress: hexutil.Encode([]byte{7}),
			Amount: "8",
		},
	}

	result := jsonifyWithdrawals(input)
	assert.DeepEqual(t, expectedResult, result)
}
@@ -1,482 +0,0 @@
package validator_api

import (
	"strconv"

	"github.com/ethereum/go-ethereum/common/hexutil"
	"github.com/pkg/errors"
	"github.com/prysmaticlabs/prysm/v5/api/server/structs"
	"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
	enginev1 "github.com/prysmaticlabs/prysm/v5/proto/engine/v1"
	ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
)

func convertProposerSlashingsToProto(jsonProposerSlashings []*structs.ProposerSlashing) ([]*ethpb.ProposerSlashing, error) {
	proposerSlashings := make([]*ethpb.ProposerSlashing, len(jsonProposerSlashings))

	for index, jsonProposerSlashing := range jsonProposerSlashings {
		if jsonProposerSlashing == nil {
			return nil, errors.Errorf("proposer slashing at index `%d` is nil", index)
		}

		header1, err := convertProposerSlashingSignedHeaderToProto(jsonProposerSlashing.SignedHeader1)
		if err != nil {
			return nil, errors.Wrap(err, "failed to get proposer header 1")
		}

		header2, err := convertProposerSlashingSignedHeaderToProto(jsonProposerSlashing.SignedHeader2)
		if err != nil {
			return nil, errors.Wrap(err, "failed to get proposer header 2")
		}

		proposerSlashings[index] = &ethpb.ProposerSlashing{
			Header_1: header1,
			Header_2: header2,
		}
	}

	return proposerSlashings, nil
}

func convertProposerSlashingSignedHeaderToProto(signedHeader *structs.SignedBeaconBlockHeader) (*ethpb.SignedBeaconBlockHeader, error) {
	if signedHeader == nil {
		return nil, errors.New("signed header is nil")
	}

	if signedHeader.Message == nil {
		return nil, errors.New("header is nil")
	}

	slot, err := strconv.ParseUint(signedHeader.Message.Slot, 10, 64)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse header slot `%s`", signedHeader.Message.Slot)
	}

	proposerIndex, err := strconv.ParseUint(signedHeader.Message.ProposerIndex, 10, 64)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse header proposer index `%s`", signedHeader.Message.ProposerIndex)
	}

	parentRoot, err := hexutil.Decode(signedHeader.Message.ParentRoot)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode header parent root `%s`", signedHeader.Message.ParentRoot)
	}

	stateRoot, err := hexutil.Decode(signedHeader.Message.StateRoot)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode header state root `%s`", signedHeader.Message.StateRoot)
	}

	bodyRoot, err := hexutil.Decode(signedHeader.Message.BodyRoot)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode header body root `%s`", signedHeader.Message.BodyRoot)
	}

	signature, err := hexutil.Decode(signedHeader.Signature)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode signature `%s`", signedHeader.Signature)
	}

	return &ethpb.SignedBeaconBlockHeader{
		Header: &ethpb.BeaconBlockHeader{
			Slot: primitives.Slot(slot),
			ProposerIndex: primitives.ValidatorIndex(proposerIndex),
			ParentRoot: parentRoot,
			StateRoot: stateRoot,
			BodyRoot: bodyRoot,
		},
		Signature: signature,
	}, nil
}

func convertAttesterSlashingsToProto(jsonAttesterSlashings []*structs.AttesterSlashing) ([]*ethpb.AttesterSlashing, error) {
	attesterSlashings := make([]*ethpb.AttesterSlashing, len(jsonAttesterSlashings))

	for index, jsonAttesterSlashing := range jsonAttesterSlashings {
		if jsonAttesterSlashing == nil {
			return nil, errors.Errorf("attester slashing at index `%d` is nil", index)
		}

		attestation1, err := convertIndexedAttestationToProto(jsonAttesterSlashing.Attestation1)
		if err != nil {
			return nil, errors.Wrap(err, "failed to get attestation 1")
		}

		attestation2, err := convertIndexedAttestationToProto(jsonAttesterSlashing.Attestation2)
		if err != nil {
			return nil, errors.Wrap(err, "failed to get attestation 2")
		}

		attesterSlashings[index] = &ethpb.AttesterSlashing{
			Attestation_1: attestation1,
			Attestation_2: attestation2,
		}
	}

	return attesterSlashings, nil
}

func convertIndexedAttestationToProto(jsonAttestation *structs.IndexedAttestation) (*ethpb.IndexedAttestation, error) {
	if jsonAttestation == nil {
		return nil, errors.New("indexed attestation is nil")
	}

	attestingIndices := make([]uint64, len(jsonAttestation.AttestingIndices))

	for index, jsonAttestingIndex := range jsonAttestation.AttestingIndices {
		attestingIndex, err := strconv.ParseUint(jsonAttestingIndex, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse attesting index `%s`", jsonAttestingIndex)
		}

		attestingIndices[index] = attestingIndex
	}

	signature, err := hexutil.Decode(jsonAttestation.Signature)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode attestation signature `%s`", jsonAttestation.Signature)
	}

	attestationData, err := convertAttestationDataToProto(jsonAttestation.Data)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get attestation data")
	}

	return &ethpb.IndexedAttestation{
		AttestingIndices: attestingIndices,
		Data: attestationData,
		Signature: signature,
	}, nil
}

func convertCheckpointToProto(jsonCheckpoint *structs.Checkpoint) (*ethpb.Checkpoint, error) {
	if jsonCheckpoint == nil {
		return nil, errors.New("checkpoint is nil")
	}

	epoch, err := strconv.ParseUint(jsonCheckpoint.Epoch, 10, 64)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse checkpoint epoch `%s`", jsonCheckpoint.Epoch)
	}

	root, err := hexutil.Decode(jsonCheckpoint.Root)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode checkpoint root `%s`", jsonCheckpoint.Root)
	}

	return &ethpb.Checkpoint{
		Epoch: primitives.Epoch(epoch),
		Root: root,
	}, nil
}

func convertAttestationToProto(jsonAttestation *structs.Attestation) (*ethpb.Attestation, error) {
	if jsonAttestation == nil {
		return nil, errors.New("json attestation is nil")
	}

	aggregationBits, err := hexutil.Decode(jsonAttestation.AggregationBits)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode aggregation bits `%s`", jsonAttestation.AggregationBits)
	}

	attestationData, err := convertAttestationDataToProto(jsonAttestation.Data)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get attestation data")
	}

	signature, err := hexutil.Decode(jsonAttestation.Signature)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode attestation signature `%s`", jsonAttestation.Signature)
	}

	return &ethpb.Attestation{
		AggregationBits: aggregationBits,
		Data: attestationData,
		Signature: signature,
	}, nil
}

func convertAttestationElectraToProto(jsonAttestation *structs.AttestationElectra) (*ethpb.AttestationElectra, error) {
	if jsonAttestation == nil {
		return nil, errors.New("json attestation is nil")
	}

	aggregationBits, err := hexutil.Decode(jsonAttestation.AggregationBits)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode aggregation bits `%s`", jsonAttestation.AggregationBits)
	}

	attestationData, err := convertAttestationDataToProto(jsonAttestation.Data)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get attestation data")
	}

	signature, err := hexutil.Decode(jsonAttestation.Signature)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode attestation signature `%s`", jsonAttestation.Signature)
	}

	committeeBits, err := hexutil.Decode(jsonAttestation.CommitteeBits)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode committee bits `%s`", jsonAttestation.CommitteeBits)
	}

	return &ethpb.AttestationElectra{
		AggregationBits: aggregationBits,
		Data: attestationData,
		Signature: signature,
		CommitteeBits: committeeBits,
	}, nil
}

func convertAttestationsToProto(jsonAttestations []*structs.Attestation) ([]*ethpb.Attestation, error) {
	var attestations []*ethpb.Attestation
	for index, jsonAttestation := range jsonAttestations {
		if jsonAttestation == nil {
			return nil, errors.Errorf("attestation at index `%d` is nil", index)
		}

		attestation, err := convertAttestationToProto(jsonAttestation)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to convert json attestation to proto at index %d", index)
		}

		attestations = append(attestations, attestation)
	}

	return attestations, nil
}

func convertAttestationDataToProto(jsonAttestationData *structs.AttestationData) (*ethpb.AttestationData, error) {
	if jsonAttestationData == nil {
		return nil, errors.New("attestation data is nil")
	}

	slot, err := strconv.ParseUint(jsonAttestationData.Slot, 10, 64)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse attestation slot `%s`", jsonAttestationData.Slot)
	}

	committeeIndex, err := strconv.ParseUint(jsonAttestationData.CommitteeIndex, 10, 64)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse attestation committee index `%s`", jsonAttestationData.CommitteeIndex)
	}

	beaconBlockRoot, err := hexutil.Decode(jsonAttestationData.BeaconBlockRoot)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to decode attestation beacon block root `%s`", jsonAttestationData.BeaconBlockRoot)
	}

	sourceCheckpoint, err := convertCheckpointToProto(jsonAttestationData.Source)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get attestation source checkpoint")
	}

	targetCheckpoint, err := convertCheckpointToProto(jsonAttestationData.Target)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get attestation target checkpoint")
	}

	return &ethpb.AttestationData{
		Slot: primitives.Slot(slot),
		CommitteeIndex: primitives.CommitteeIndex(committeeIndex),
		BeaconBlockRoot: beaconBlockRoot,
		Source: sourceCheckpoint,
		Target: targetCheckpoint,
	}, nil
}

func convertDepositsToProto(jsonDeposits []*structs.Deposit) ([]*ethpb.Deposit, error) {
	deposits := make([]*ethpb.Deposit, len(jsonDeposits))

	for depositIndex, jsonDeposit := range jsonDeposits {
		if jsonDeposit == nil {
			return nil, errors.Errorf("deposit at index `%d` is nil", depositIndex)
		}

		proofs := make([][]byte, len(jsonDeposit.Proof))
		for proofIndex, jsonProof := range jsonDeposit.Proof {
			proof, err := hexutil.Decode(jsonProof)
			if err != nil {
				return nil, errors.Wrapf(err, "failed to decode deposit proof `%s`", jsonProof)
			}

			proofs[proofIndex] = proof
		}

		if jsonDeposit.Data == nil {
			return nil, errors.Errorf("deposit data at index `%d` is nil", depositIndex)
		}

		pubkey, err := hexutil.Decode(jsonDeposit.Data.Pubkey)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to decode deposit public key `%s`", jsonDeposit.Data.Pubkey)
		}

		withdrawalCredentials, err := hexutil.Decode(jsonDeposit.Data.WithdrawalCredentials)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to decode deposit withdrawal credentials `%s`", jsonDeposit.Data.WithdrawalCredentials)
		}

		amount, err := strconv.ParseUint(jsonDeposit.Data.Amount, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse deposit amount `%s`", jsonDeposit.Data.Amount)
		}

		signature, err := hexutil.Decode(jsonDeposit.Data.Signature)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to decode signature `%s`", jsonDeposit.Data.Signature)
		}

		deposits[depositIndex] = &ethpb.Deposit{
			Proof: proofs,
			Data: &ethpb.Deposit_Data{
				PublicKey: pubkey,
				WithdrawalCredentials: withdrawalCredentials,
				Amount: amount,
				Signature: signature,
			},
		}
	}

	return deposits, nil
}

func convertVoluntaryExitsToProto(jsonVoluntaryExits []*structs.SignedVoluntaryExit) ([]*ethpb.SignedVoluntaryExit, error) {
	attestingIndices := make([]*ethpb.SignedVoluntaryExit, len(jsonVoluntaryExits))

	for index, jsonVoluntaryExit := range jsonVoluntaryExits {
		if jsonVoluntaryExit == nil {
			return nil, errors.Errorf("signed voluntary exit at index `%d` is nil", index)
		}

		if jsonVoluntaryExit.Message == nil {
			return nil, errors.Errorf("voluntary exit at index `%d` is nil", index)
		}

		epoch, err := strconv.ParseUint(jsonVoluntaryExit.Message.Epoch, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse voluntary exit epoch `%s`", jsonVoluntaryExit.Message.Epoch)
		}

		validatorIndex, err := strconv.ParseUint(jsonVoluntaryExit.Message.ValidatorIndex, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse voluntary exit validator index `%s`", jsonVoluntaryExit.Message.ValidatorIndex)
		}

		signature, err := hexutil.Decode(jsonVoluntaryExit.Signature)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to decode signature `%s`", jsonVoluntaryExit.Signature)
		}

		attestingIndices[index] = &ethpb.SignedVoluntaryExit{
			Exit: &ethpb.VoluntaryExit{
				Epoch: primitives.Epoch(epoch),
				ValidatorIndex: primitives.ValidatorIndex(validatorIndex),
			},
			Signature: signature,
		}
	}

	return attestingIndices, nil
}

func convertTransactionsToProto(jsonTransactions []string) ([][]byte, error) {
	transactions := make([][]byte, len(jsonTransactions))

	for index, jsonTransaction := range jsonTransactions {
		transaction, err := hexutil.Decode(jsonTransaction)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to decode transaction `%s`", jsonTransaction)
		}

		transactions[index] = transaction
	}

	return transactions, nil
}

func convertWithdrawalsToProto(jsonWithdrawals []*structs.Withdrawal) ([]*enginev1.Withdrawal, error) {
	withdrawals := make([]*enginev1.Withdrawal, len(jsonWithdrawals))

	for index, jsonWithdrawal := range jsonWithdrawals {
		if jsonWithdrawal == nil {
			return nil, errors.Errorf("withdrawal at index `%d` is nil", index)
		}

		withdrawalIndex, err := strconv.ParseUint(jsonWithdrawal.WithdrawalIndex, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse withdrawal index `%s`", jsonWithdrawal.WithdrawalIndex)
		}

		validatorIndex, err := strconv.ParseUint(jsonWithdrawal.ValidatorIndex, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse validator index `%s`", jsonWithdrawal.ValidatorIndex)
		}

		executionAddress, err := hexutil.Decode(jsonWithdrawal.ExecutionAddress)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to decode execution address `%s`", jsonWithdrawal.ExecutionAddress)
		}

		amount, err := strconv.ParseUint(jsonWithdrawal.Amount, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse withdrawal amount `%s`", jsonWithdrawal.Amount)
		}

		withdrawals[index] = &enginev1.Withdrawal{
			Index: withdrawalIndex,
			ValidatorIndex: primitives.ValidatorIndex(validatorIndex),
			Address: executionAddress,
			Amount: amount,
		}
	}

	return withdrawals, nil
}

func convertBlsToExecutionChangesToProto(jsonSignedBlsToExecutionChanges []*structs.SignedBLSToExecutionChange) ([]*ethpb.SignedBLSToExecutionChange, error) {
	signedBlsToExecutionChanges := make([]*ethpb.SignedBLSToExecutionChange, len(jsonSignedBlsToExecutionChanges))

	for index, jsonBlsToExecutionChange := range jsonSignedBlsToExecutionChanges {
		if jsonBlsToExecutionChange == nil {
			return nil, errors.Errorf("bls to execution change at index `%d` is nil", index)
		}

		if jsonBlsToExecutionChange.Message == nil {
			return nil, errors.Errorf("bls to execution change message at index `%d` is nil", index)
		}

		validatorIndex, err := strconv.ParseUint(jsonBlsToExecutionChange.Message.ValidatorIndex, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to decode validator index `%s`", jsonBlsToExecutionChange.Message.ValidatorIndex)
		}

		fromBlsPubkey, err := hexutil.Decode(jsonBlsToExecutionChange.Message.FromBLSPubkey)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to decode bls pubkey `%s`", jsonBlsToExecutionChange.Message.FromBLSPubkey)
		}

		toExecutionAddress, err := hexutil.Decode(jsonBlsToExecutionChange.Message.ToExecutionAddress)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to decode execution address `%s`", jsonBlsToExecutionChange.Message.ToExecutionAddress)
		}

		signature, err := hexutil.Decode(jsonBlsToExecutionChange.Signature)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to decode signature `%s`", jsonBlsToExecutionChange.Signature)
		}

		signedBlsToExecutionChanges[index] = &ethpb.SignedBLSToExecutionChange{
			Message: &ethpb.BLSToExecutionChange{
				ValidatorIndex: primitives.ValidatorIndex(validatorIndex),
				FromBlsPubkey: fromBlsPubkey,
				ToExecutionAddress: toExecutionAddress,
			},
			Signature: signature,
		}
	}

	return signedBlsToExecutionChanges, nil
}
Some files were not shown because too many files have changed in this diff.