Mirror of https://github.com/OffchainLabs/prysm.git, synced 2026-01-09 21:38:05 -05:00

Compare commits: mac-zig-fi ... increase_h (404 commits)
.bazelrc (7 changes)
@@ -6,6 +6,12 @@ import %workspace%/build/bazelrc/debug.bazelrc
import %workspace%/build/bazelrc/hermetic-cc.bazelrc
import %workspace%/build/bazelrc/performance.bazelrc

# hermetic_cc_toolchain v3.0.1 required changes.
common --enable_platform_specific_config
build:linux --sandbox_add_mount_pair=/tmp
build:macos --sandbox_add_mount_pair=/var/tmp
build:windows --sandbox_add_mount_pair=C:\Temp

# E2E run with debug gotag
test:e2e --define gotags=debug

@@ -27,6 +33,7 @@ build:minimal --@io_bazel_rules_go//go/config:tags=minimal
# Release flags
build:release --compilation_mode=opt
build:release --stamp
build:release --define pgo_enabled=1

# Build binary with cgo symbolizer for debugging / profiling.
build:cgo_symbolizer --copt=-g
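The named configs above are selected with Bazel's --config flag. A minimal usage sketch follows; the release invocation also appears verbatim in the build docs further down this page, while the test target in the second command is a placeholder and applying --config=e2e that way is an assumption, not something stated in this diff.

```sh
# Optimized, stamped build with PGO enabled (the build:release lines above).
bazel build //beacon-chain --config=release

# Placeholder target; runs tests with the debug gotag defined by "test:e2e" (assumed usage).
bazel test //path/to:some_e2e_test --config=e2e
```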
@@ -1 +1 @@
6.3.2
7.1.0

@@ -9,8 +9,8 @@
#build:remote-cache --strategy=Genrule=standalone

# Prysm specific remote-cache properties.
#build:remote-cache --disk_cache=
build:remote-cache --remote_download_toplevel
build:remote-cache --remote_download_minimal
build:remote-cache --remote_build_event_upload=minimal
build:remote-cache --remote_cache=grpc://bazel-remote-cache:9092
# Does not work with rules_oci. See https://github.com/bazel-contrib/rules_oci/issues/292
#build:remote-cache --experimental_remote_downloader=grpc://bazel-remote-cache:9092
@@ -29,7 +29,10 @@ build --experimental_use_hermetic_linux_sandbox
# Import workspace options.
import %workspace%/.bazelrc

startup --host_jvm_args=-Xmx4g --host_jvm_args=-Xms2g
# Enable blake3 once it is supported in remote cache. See: https://github.com/buchgr/bazel-remote/issues/710
# startup --digest_function=blake3

startup --host_jvm_args=-Xmx8g --host_jvm_args=-Xms4g
build --experimental_strict_action_env
build --sandbox_tmpfs_path=/tmp
build --verbose_failures
@@ -39,6 +42,7 @@ build --curses=no --color=no
build --keep_going
build --test_output=errors
build --flaky_test_attempts=5
build --build_runfile_links=false # Only build runfile symlink forest when required by local action, test, or run command.
# Disabled race detection due to unstable test results under constrained environment build kite
# build --features=race
@@ -11,7 +11,7 @@ name = "go"
enabled = true

[analyzers.meta]
import_paths = ["github.com/prysmaticlabs/prysm/v4"]
import_paths = ["github.com/prysmaticlabs/prysm/v5"]

[[analyzers]]
name = "test-coverage"
.github/actions/gomodtidy/Dockerfile (vendored, 2 changes)
@@ -1,4 +1,4 @@
FROM golang:alpine
FROM golang:1.21-alpine

COPY entrypoint.sh /entrypoint.sh
.github/workflows/fuzz.yml (vendored, 4 changes)
@@ -16,7 +16,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
with:
go-version: '1.20'
go-version: '1.21.5'
- id: list
uses: shogo82148/actions-go-fuzz/list@v0
with:
@@ -36,7 +36,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
with:
go-version: '1.20'
go-version: '1.21.5'
- uses: shogo82148/actions-go-fuzz/run@v0
with:
packages: ${{ matrix.package }}
.github/workflows/go.yml (vendored, 16 changes)
@@ -5,6 +5,8 @@ on:
branches: [ master ]
pull_request:
branches: [ '*' ]
merge_group:
types: [checks_requested]

jobs:
formatting:
@@ -26,15 +28,15 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Go 1.20
- name: Set up Go 1.21
uses: actions/setup-go@v3
with:
go-version: '1.20'
go-version: '1.21.5'
- name: Run Gosec Security Scanner
run: | # https://github.com/securego/gosec/issues/469
export PATH=$PATH:$(go env GOPATH)/bin
go install github.com/securego/gosec/v2/cmd/gosec@v2.15.0
gosec -exclude=G307 -exclude-dir=crypto/bls/herumi ./...
gosec -exclude-generated -exclude=G307 -exclude-dir=crypto/bls/herumi ./...

lint:
name: Lint
@@ -43,16 +45,16 @@ jobs:
- name: Checkout
uses: actions/checkout@v2

- name: Set up Go 1.20
- name: Set up Go 1.21
uses: actions/setup-go@v3
with:
go-version: '1.20'
go-version: '1.21.5'
id: go

- name: Golangci-lint
uses: golangci/golangci-lint-action@v3
with:
version: v1.52.2
version: v1.55.2
args: --config=.golangci.yml --out-${NO_FUTURE}format colored-line-number

build:
@@ -62,7 +64,7 @@ jobs:
- name: Set up Go 1.x
uses: actions/setup-go@v2
with:
go-version: '1.20'
go-version: '1.21.5'
id: go

- name: Check out code into the Go module directory
@@ -6,21 +6,82 @@ run:
- proto
- tools/analyzers
timeout: 10m
go: '1.19'
go: '1.21.5'

linters:
disable-all: true
enable:
- gofmt
- goimports
- unused
- errcheck
- gosimple
- gocognit
- dupword
- nilerr
- whitespace
- misspell
enable-all: true
disable:
# Deprecated linters:
- deadcode
- exhaustivestruct
- golint
- govet
- ifshort
- interfacer
- maligned
- nosnakecase
- scopelint
- structcheck
- varcheck

# Disabled for now:
- asasalint
- bodyclose
- containedctx
- contextcheck
- cyclop
- depguard
- dogsled
- dupl
- durationcheck
- errorlint
- exhaustive
- exhaustruct
- forbidigo
- forcetypeassert
- funlen
- gci
- gochecknoglobals
- gochecknoinits
- goconst
- gocritic
- gocyclo
- godot
- godox
- goerr113
- gofumpt
- gomnd
- gomoddirectives
- inamedparam
- interfacebloat
- ireturn
- lll
- maintidx
- makezero
- musttag
- nakedret
- nestif
- nilnil
- nlreturn
- noctx
- nolintlint
- nonamedreturns
- nosprintfhostport
- perfsprint
- prealloc
- predeclared
- promlinter
- protogetter
- revive
- staticcheck
- stylecheck
- tagalign
- tagliatelle
- thelper
- unparam
- varnamelen
- wrapcheck
- wsl

linters-settings:
gocognit:
BUILD.bazel (192 changes)
@@ -4,6 +4,7 @@ load("@com_github_atlassian_bazel_tools//goimports:def.bzl", "goimports")
load("@io_kubernetes_build//defs:run_in_workspace.bzl", "workspace_binary")
load("@io_bazel_rules_go//go:def.bzl", "nogo")
load("@bazel_skylib//rules:common_settings.bzl", "string_setting")
load("@prysm//tools/nogo_config:def.bzl", "nogo_config_exclude")

prefix = "github.com/prysmaticlabs/prysm"

@@ -11,7 +12,7 @@ exports_files([
"LICENSE.md",
])

# gazelle:prefix github.com/prysmaticlabs/prysm/v4
# gazelle:prefix github.com/prysmaticlabs/prysm/v5
# gazelle:map_kind go_library go_library @prysm//tools/go:def.bzl
# gazelle:map_kind go_test go_test @prysm//tools/go:def.bzl
# gazelle:map_kind go_repository go_repository @prysm//tools/go:def.bzl
@@ -82,38 +83,117 @@ workspace_binary(
|
||||
cmd = "@com_github_golang_lint//golint",
|
||||
)
|
||||
|
||||
STATICCHECK_ANALYZERS = [
|
||||
# Enabled static checks. See https://staticcheck.dev/docs/checks/
|
||||
# Please. keep this list sorted. Don't be a bad person by inserting stuff randomly.
|
||||
"sa1000",
|
||||
"sa1001",
|
||||
"sa1002",
|
||||
"sa1003",
|
||||
"sa1004",
|
||||
"sa1005",
|
||||
"sa1006",
|
||||
"sa1007",
|
||||
"sa1008",
|
||||
"sa1010",
|
||||
"sa1011",
|
||||
"sa1012",
|
||||
"sa1013",
|
||||
"sa1014",
|
||||
"sa1015",
|
||||
"sa1016",
|
||||
"sa1017",
|
||||
"sa1018",
|
||||
# "sa1019", # TODO: Fix all uses of deprecated things.
|
||||
"sa1020",
|
||||
"sa1021",
|
||||
"sa1023",
|
||||
"sa1024",
|
||||
"sa1025",
|
||||
"sa1026",
|
||||
"sa1027",
|
||||
"sa1028",
|
||||
"sa1029",
|
||||
"sa1030",
|
||||
"sa2000",
|
||||
"sa2001",
|
||||
"sa2002",
|
||||
"sa2003",
|
||||
"sa3000",
|
||||
"sa3001",
|
||||
"sa4000",
|
||||
"sa4001",
|
||||
"sa4003",
|
||||
"sa4004",
|
||||
"sa4005",
|
||||
"sa4006",
|
||||
"sa4008",
|
||||
"sa4009",
|
||||
"sa4010",
|
||||
"sa4011",
|
||||
"sa4012",
|
||||
"sa4013",
|
||||
"sa4014",
|
||||
"sa4015",
|
||||
"sa4016",
|
||||
"sa4017",
|
||||
"sa4018",
|
||||
"sa4019",
|
||||
"sa4020",
|
||||
"sa4021",
|
||||
"sa4022",
|
||||
"sa4023",
|
||||
"sa4024",
|
||||
"sa4025",
|
||||
"sa4026",
|
||||
"sa4027",
|
||||
"sa4028",
|
||||
"sa4029",
|
||||
"sa4030",
|
||||
"sa4031",
|
||||
"sa4032",
|
||||
"sa5000",
|
||||
"sa5001",
|
||||
"sa5002",
|
||||
"sa5003",
|
||||
"sa5004",
|
||||
"sa5005",
|
||||
"sa5007",
|
||||
"sa5008",
|
||||
"sa5009",
|
||||
"sa5010",
|
||||
"sa5011",
|
||||
"sa5012",
|
||||
"sa6000",
|
||||
"sa6001",
|
||||
"sa6002",
|
||||
"sa6003",
|
||||
"sa6005",
|
||||
"sa6006",
|
||||
"sa9001",
|
||||
"sa9002",
|
||||
#"sa9003", # Doesn't build. See https://github.com/dominikh/go-tools/pull/1483
|
||||
"sa9004",
|
||||
"sa9005",
|
||||
"sa9006",
|
||||
"sa9007",
|
||||
"sa9008",
|
||||
]
|
||||
|
||||
nogo_config_exclude(
|
||||
name = "nogo_config_with_excludes",
|
||||
checks = [sa.upper() for sa in STATICCHECK_ANALYZERS],
|
||||
exclude_files = [
|
||||
"external/.*",
|
||||
],
|
||||
input = "nogo_config.json",
|
||||
)
|
||||
|
||||
nogo(
|
||||
name = "nogo",
|
||||
config = "nogo_config.json",
|
||||
config = ":nogo_config_with_excludes",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"@org_golang_x_tools//go/analysis/passes/unsafeptr:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unreachable:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unmarshal:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/tests:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/structtag:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/stdmethods:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/shift:go_default_library",
|
||||
# "@org_golang_x_tools//go/analysis/passes/shadow:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/printf:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/pkgfact:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/nilness:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/nilfunc:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/loopclosure:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/httpresponse:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/findcall:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/deepequalerrors:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/ctrlflow:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/copylock:go_default_library",
|
||||
# "@org_golang_x_tools//go/analysis/passes/cgocall:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/buildtag:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/buildssa:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/bools:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/atomicalign:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/atomic:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/assign:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/inspect:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/asmdecl:go_default_library",
|
||||
"//tools/analyzers/comparesame:go_default_library",
|
||||
"//tools/analyzers/cryptorand:go_default_library",
|
||||
"//tools/analyzers/errcheck:go_default_library",
|
||||
@@ -129,6 +209,53 @@ nogo(
|
||||
"//tools/analyzers/shadowpredecl:go_default_library",
|
||||
"//tools/analyzers/slicedirect:go_default_library",
|
||||
"//tools/analyzers/uintcast:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/appends:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/asmdecl:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/assign:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/atomic:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/atomicalign:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/bools:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/buildssa:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/buildtag:go_default_library",
|
||||
# cgocall disabled
|
||||
#"@org_golang_x_tools//go/analysis/passes/cgocall:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/copylock:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/ctrlflow:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/deepequalerrors:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/defers:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/directive:go_default_library",
|
||||
# fieldalignment disabled
|
||||
#"@org_golang_x_tools//go/analysis/passes/fieldalignment:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/findcall:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/framepointer:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/httpmux:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/httpresponse:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/ifaceassert:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/inspect:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/loopclosure:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/nilfunc:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/nilness:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/pkgfact:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/printf:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/reflectvaluecompare:go_default_library",
|
||||
# shadow disabled
|
||||
#"@org_golang_x_tools//go/analysis/passes/shadow:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/shift:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/sigchanyzer:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/slog:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/sortslice:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/stdmethods:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/stringintconv:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/structtag:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/testinggoroutine:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/tests:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/timeformat:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unmarshal:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unreachable:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unsafeptr:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unusedresult:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/unusedwrite:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/usesgenerics:go_default_library",
|
||||
] + select({
|
||||
# nogo checks that fail with coverage enabled.
|
||||
":coverage_enabled": [],
|
||||
@@ -136,7 +263,7 @@ nogo(
|
||||
"@org_golang_x_tools//go/analysis/passes/composite:go_default_library",
|
||||
"@org_golang_x_tools//go/analysis/passes/lostcancel:go_default_library",
|
||||
],
|
||||
}),
|
||||
}) + ["@co_honnef_go_tools//staticcheck/%s:go_default_library" % c for c in STATICCHECK_ANALYZERS],
|
||||
)
|
||||
|
||||
config_setting(
|
||||
@@ -144,6 +271,11 @@ config_setting(
|
||||
values = {"define": "coverage_enabled=1"},
|
||||
)
|
||||
|
||||
config_setting(
|
||||
name = "pgo_enabled",
|
||||
values = {"define": "pgo_enabled=1"},
|
||||
)
|
||||
|
||||
common_files = {
|
||||
"//:LICENSE.md": "LICENSE.md",
|
||||
"//:README.md": "README.md",
|
||||
|
||||
@@ -55,7 +55,7 @@ bazel build //beacon-chain --config=release
## Adding / updating dependencies

1. Add your dependency as you would with go modules. I.e. `go get ...`
1. Run `gazelle update-repos -from_file=go.mod` to update the bazel managed dependencies.
1. Run `bazel run //:gazelle -- update-repos -from_file=go.mod` to update the bazel managed dependencies.

Example:
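An illustrative sketch of the steps above; the module path is a placeholder rather than the example shipped in the repository docs, and the final plain `bazel run //:gazelle` step is ordinary Gazelle usage assumed here, not taken from this diff.

```sh
# 1. Add the dependency with Go modules (placeholder module path).
go get github.com/example/somelib@v1.2.3
go mod tidy

# 2. Sync the Bazel-managed repositories from go.mod.
bazel run //:gazelle -- update-repos -from_file=go.mod

# Assumed extra step: regenerate BUILD files for any new imports.
bazel run //:gazelle
```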
@@ -30,9 +30,6 @@ You can use `bazel run //tools/genesis-state-gen` to create a deterministic gene
- **--output-ssz** string: Output filename of the SSZ marshaling of the generated genesis state
- **--config-name=interop** string: name of the beacon chain config to use when generating the state. ex mainnet|minimal|interop

**deprecated flag: use --config-name instead**
- **--mainnet-config** bool: Select whether genesis state should be generated with mainnet or minimal (default) params

The example below creates 64 validator keys, instantiates a genesis state with those 64 validators and with genesis unix timestamp 1567542540, and finally writes a ssz encoded output to ~/Desktop/genesis.ssz. This file can be used to kickstart the beacon chain in the next section. When using the `--interop-*` flags, the beacon node will assume the `interop` config should be used, unless a different config is specified on the command line.
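A hypothetical invocation built only from the flags listed above; the `--interop-*` flags for validator count and genesis time are mentioned in the text but their exact names are not shown in this excerpt, so they are omitted here.

```sh
# Write an SSZ-encoded genesis state using the interop config (flags from the list above).
bazel run //tools/genesis-state-gen -- \
  --config-name=interop \
  --output-ssz=$HOME/Desktop/genesis.ssz
```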
MODULE.bazel (new file, 6 lines)
@@ -0,0 +1,6 @@
###############################################################################
# Bazel now uses Bzlmod by default to manage external dependencies.
# Please consider migrating your external dependencies from WORKSPACE to MODULE.bazel.
#
# For more details, please check https://github.com/bazelbuild/bazel/issues/18958
###############################################################################
MODULE.bazel.lock (generated, new file, 1632 lines): file diff suppressed because it is too large.
@@ -7,7 +7,7 @@
[](https://discord.gg/prysmaticlabs)
[](https://www.gitpoap.io/gh/prysmaticlabs/prysm)

This is the core repository for Prysm, a [Golang](https://golang.org/) implementation of the [Ethereum Consensus](https://ethereum.org/en/eth2/) specification, developed by [Prysmatic Labs](https://prysmaticlabs.com). See the [Changelog](https://github.com/prysmaticlabs/prysm/releases) for details of the latest releases and upcoming breaking changes.
This is the core repository for Prysm, a [Golang](https://golang.org/) implementation of the [Ethereum Consensus](https://ethereum.org/en/developers/docs/consensus-mechanisms/#proof-of-stake) [specification](https://github.com/ethereum/consensus-specs), developed by [Offchain Labs](https://www.offchainlabs.com). See the [Changelog](https://github.com/prysmaticlabs/prysm/releases) for details of the latest releases and upcoming breaking changes.

### Getting Started
@@ -1,45 +1,53 @@
## Terms of Use
# Terms of Use
Effective as of November 2, 2023

Effective as of Oct 14, 2020
By downloading, accessing or using the Prysm implementation (“Prysm”), you (referenced herein as “you” or the “user”) certify that you have read and agreed to the terms and conditions below (the “Terms”) which form a binding contract between you and Offchain Labs, Inc. (as successor in interest to Prysmatic Labs LLC) (referenced herein as “Offchain Labs”, “we” or “us”). If you do not agree to the Terms, do not download or use Prysm. Additionally, the Terms of Use available at https://arbitrum.io/tos (or any successor site, the “OCL Terms of Use”) are hereby incorporated by reference into these Terms. In the event of any conflict between provisions set forth herein and those set forth in the OCL Terms of Use, the provisions set forth herein shall control.
|
||||
|
||||
By downloading, accessing or using the Prysm implementation (“Prysm”), you (referenced herein as “you” or the “user”) certify that you have read and agreed to the terms and conditions below (the “Terms”) which form a binding contract between you and Prysmatic Labs (referenced herein as “we” or “us”). If you do not agree to the Terms, do not download or use Prysm.
|
||||
## About Prysm
|
||||
|
||||
### About Prysm
|
||||
Prysm is a client implementation for Ethereum consensus protocol for a proof-of-stake blockchain. To participate in the network, a user must send ETH from the Eth1.0 chain into a validator deposit contract, which will queue in the user as a validator in the system. Validators participate in proposing and voting on blocks in the protocol, and the network applies rewards/penalties based on their behavior. A detailed set of installation and usage instructions as well as breakdowns of each individual component are available in the official documentation portal, however, we do not warrant the accuracy, completeness or usefulness of this documentation. Any reliance you place on such information is strictly at your own risk.
|
||||
Prysm is a client implementation for the Ethereum blockchain’s consensus protocol. To participate in the network, a user must send ETH from the Ethereum mainnet blockchain to a validator deposit smart contract on Ethereum mainnet. Validators participate in proposing and voting on blocks in the protocol, and the network applies rewards/penalties based on their behavior. A detailed set of installation and usage instructions as well as breakdowns of each individual component are available in the official documentation portal, however, we do not warrant the accuracy, completeness or usefulness of this documentation. Any reliance you place on such information is strictly at your own risk.
|
||||
|
||||
### Licensing Terms
|
||||
Prysm is a fully open-source software program licensed pursuant to the GNU General Public License v3.0.
|
||||
## Licensing Terms
|
||||
Prysm is an open-source software program licensed pursuant to the GNU General Public License v3.0.
|
||||
The Offchain Labs name, the term “Prysm” and all related names, logos, product and service names, designs and slogans are trademarks of Offchain Labs or its affiliates and/or licensors. You must not use such marks without our prior written permission.
|
||||
PLEASE READ THESE TERMS CAREFULLY, AS THE OCL TERMS OF USE INCORPORATED BY REFERENCE HEREIN CONTAIN AN AGREEMENT TO ARBITRATE AND OTHER IMPORTANT INFORMATION REGARDING YOUR LEGAL RIGHTS, REMEDIES, AND OBLIGATIONS. THE AGREEMENT TO ARBITRATE REQUIRES (WITH LIMITED EXCEPTION) THAT YOU SUBMIT CLAIMS YOU HAVE AGAINST US TO BINDING AND FINAL ARBITRATION, AND FURTHER (1) YOU WILL ONLY BE PERMITTED TO PURSUE CLAIMS AGAINST OFFCHAIN LABS ON AN INDIVIDUAL BASIS, NOT AS A PLAINTIFF OR CLASS MEMBER IN ANY CLASS OR REPRESENTATIVE ACTION OR PROCEEDING, (2) YOU WILL ONLY BE PERMITTED TO SEEK RELIEF (INCLUDING MONETARY, INJUNCTIVE, AND DECLARATORY RELIEF) ON AN INDIVIDUAL BASIS, AND (3) YOU MAY NOT BE ABLE TO HAVE ANY CLAIMS YOU HAVE AGAINST US RESOLVED BY A JURY OR IN A COURT OF LAW.
|
||||
|
||||
The Prysmatic Labs name, the term “Prysm” and all related names, logos, product and service names, designs and slogans are trademarks of Prysmatic Labs or its affiliates and/or licensors. You must not use such marks without our prior written permission.
|
||||
## Risks of Operating Prysm
|
||||
|
||||
### Risks of Operating Prysm
|
||||
The use of Prysm and acting as a validator on the Ethereum network can lead to loss of money. Ethereum is still an experimental system and ETH remains a risky investment. You alone are responsible for your actions on Prysm including the security of your ETH and meeting any applicable minimum system requirements.
|
||||
The use of Prysm and acting as a validator on the Ethereum network can lead to loss of money, tokens and value. Ethereum is still an experimental system and ETH remains a risky investment. You alone are responsible for your actions on Prysm, including the security of your ETH and meeting any applicable minimum system requirements.
|
||||
|
||||
Use of Prysm and the ability to receive rewards or penalties may be affected at any time by mistakes made by the user or other users, software problems such as bugs, errors, incorrectly constructed transactions, unsafe cryptographic libraries or malware affecting the network, technical failures in the hardware of a user, security problems experienced by a user and/or actions or inactions of third parties and/or events experienced by third parties, among other risks. We cannot and do not guarantee that any user of Prysm will make money, that the Prysm network will operate in accordance with the documentation or that transactions will be effective or secure.
|
||||
|
||||
We make no claims that Prysm is appropriate or permitted for use in any specific jurisdiction. Access to Prysm may not be legal by certain persons or in certain jurisdictions or countries. If you access Prysm, you do so on your own initiative and are responsible for compliance with local laws.
|
||||
YOU ACKNOWLEDGE THAT WE ARE NOT RESPONSIBLE FOR ANY RISKS ASSOCIATED WITH YOUR USE OF PRYSM, AND CANNOT BE HELD LIABLE FOR ANY RESULTING LOSSES THAT YOU EXPERIENCE WHILE ACCESSING OR USING PRYSM.
|
||||
|
||||
Some Internet plans will charge an additional amount for any excess upload bandwidth used that isn’t included in the plan and may terminate your connection without warning because of overuse. We advise that you check whether your Internet connection is subjected to such limitations and monitor your bandwidth use so that you can stop Prysm before you reach your upload limit.
|
||||
BY ACCESSING AND USING PRYSM, YOU REPRESENT AND WARRANT THAT YOU UNDERSTAND THE INHERENT RISKS ASSOCIATED WITH USING CRYPTOGRAPHIC AND BLOCKCHAIN-BASED SYSTEMS, AND THAT YOU HAVE A WORKING KNOWLEDGE OF THE USAGE AND INTRICACIES OF DIGITAL ASSETS, SUCH AS THOSE FOLLOWING THE ETHEREUM TOKEN STANDARD (ERC-20). YOU FURTHER UNDERSTAND THAT THE MARKETS FOR DIGITAL ASSETS ARE HIGHLY VOLATILE DUE TO VARIOUS FACTORS, INCLUDING ADOPTION, SPECULATION, TECHNOLOGY, SECURITY, AND REGULATION. YOU ACKNOWLEDGE AND ACCEPT THAT THE COST AND SPEED OF TRANSACTING WITH CRYPTOGRAPHIC AND BLOCKCHAIN-BASED SYSTEMS SUCH AS ETHEREUM ARE VARIABLE AND MAY INCREASE DRAMATICALLY AT ANY TIME. YOU UNDERSTAND THAT ANYONE CAN CREATE A TOKEN, INCLUDING FAKE VERSIONS OF EXISTING TOKENS AND TOKENS THAT FALSELY CLAIM TO REPRESENT PROJECTS, AND ACKNOWLEDGE AND ACCEPT THE RISK THAT YOU MAY MISTAKENLY INTERACT WITH THOSE OR OTHER TOKENS. YOU FURTHER ACKNOWLEDGE THAT WE ARE NOT RESPONSIBLE FOR ANY OF THE VARIABLES OR RISKS DESCRIBED IN THESE TERMS. YOU UNDERSTAND AND AGREE TO ASSUME FULL RESPONSIBILITY FOR ALL OF THE RISKS OF ACCESSING AND USING PRYSM. YOU ARE SOLELY RESPONSIBLE FOR YOUR WALLETS, FOR SAFEGUARDING THE ASSOCIATED PRIVATE KEY AND FOR ANY ACTIVITY THAT OCCURS USING YOUR WALLET. WITHOUT LIMITING THE FOREGOING, YOU ALSO UNDERSTAND THAT THERE MAY BE TAX AND REGULATORY RISKS RELATED TO USING PRYSM. IT IS YOUR SOLE RESPONSIBILITY TO DETERMINE WHETHER, AND TO WHAT EXTENT, ANY TAXES APPLY TO ANY TRANSACTIONS YOU CONDUCT IN CONNECTION WITH YOUR USE OF PRYSM, AND TO WITHHOLD, COLLECT, REPORT AND REMIT THE CORRECT AMOUNTS OF TAXES TO THE APPROPRIATE TAX AUTHORITIES. DIGITAL ASSETS, BLOCKCHAIN TECHNOLOGY, AND ANY RELATED SOFTWARE AND SERVICES ARE ALSO SUBJECT TO LEGAL AND REGULATORY UNCERTAINTY IN THE UNITED STATES AND OTHER JURISDICTIONS. YOU UNDERSTAND THAT LEGISLATIVE AND REGULATORY CHANGES OR ACTIONS MAY ADVERSELY AFFECT THE USAGE, TRANSFERABILITY, TRANSACTABILITY AND ACCESSIBILITY RELATED TO PRYSM.
|
||||
|
||||
### Warranty Disclaimer
|
||||
PRYSM IS PROVIDED ON AN “AS-IS” BASIS AND MAY INCLUDE ERRORS, OMISSIONS, OR OTHER INACCURACIES. PRYSMATIC LABS AND ITS CONTRIBUTORS MAKE NO REPRESENTATIONS OR WARRANTIES ABOUT PRYSM FOR ANY PURPOSE, AND HEREBY EXPRESSLY DISCLAIM ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT OR ANY OTHER IMPLIED WARRANTY UNDER THE UNIFORM COMPUTER INFORMATION TRANSACTIONS ACT AS ENACTED BY ANY STATE. WE ALSO MAKE NO REPRESENTATIONS OR WARRANTIES THAT PRYSM WILL OPERATE ERROR-FREE, UNINTERRUPTED, OR IN A MANNER THAT WILL MEET YOUR REQUIREMENTS AND/OR NEEDS. THEREFORE, YOU ASSUME THE ENTIRE RISK REGARDING THE QUALITY AND/OR PERFORMANCE OF PRYSM AND ANY TRANSACTIONS ENTERED INTO THEREON.
|
||||
We make no claims that Prysm is appropriate or permitted for use in any specific jurisdiction. Access to Prysm may not be legal by certain persons or in certain jurisdictions or countries. If you access Prysm, you do so on your own initiative and are responsible for compliance with all Applicable Law (as defined below), including, without limitation, for the avoidance of doubt, local laws.
|
||||
|
||||
### Limitation of Liability
|
||||
In no event will Prysmatic Labs or any of its contributors be liable, whether in contract, warranty, tort (including negligence, whether active, passive or imputed), product liability, strict liability or other theory, breach of statutory duty or otherwise arising out of, or in connection with, your use of Prysm, for any direct, indirect, incidental, special or consequential damages (including any loss of profits or data, business interruption or other pecuniary loss, or damage, loss or other compromise of data, in each case whether direct, indirect, incidental, special or consequential) arising out of use Prysm, even if we or other users have been advised of the possibility of such damages. The foregoing limitations and disclaimers shall apply to the maximum extent permitted by applicable law, even if any remedy fails of its essential purpose. You acknowledge and agree that the limitations of liability afforded us hereunder constitute a material and actual inducement and condition to entering into these Terms, and are reasonable, fair and equitable in scope to protect our legitimate interests in light of the fact that we are not receiving consideration from you for providing Prysm.
|
||||
Some Internet plans will charge additional amounts for bandwidth or any excess upload bandwidth used that isn’t included in the plan and may terminate your connection without warning because of overuse. We advise that you check whether your Internet connection is subjected to any such limitations and monitor your bandwidth use and upload volumes.
|
||||
|
||||
### Indemnification
|
||||
To the maximum extent permitted by law, you will defend, indemnify and hold Prysmatic Labs and its contributors harmless from and against any and all claims, actions, suits, investigations, or proceedings by any third party (including any party or purported party to or beneficiary or purported beneficiary of any transaction on Prysm), as well as any and all losses, liabilities,
|
||||
damages, costs, and expenses (including reasonable attorneys’ fees) arising out of, accruing from, or in any way related to (i) your breach of the terms of this Agreement, (ii) any transaction, or the failure to occur of any transaction on Prysm, and (iii) your negligence, fraud, or willful misconduct.
|
||||
## Warranty Disclaimer
|
||||
|
||||
### Compliance with Laws and Tax Obligations
|
||||
Your use of Prysm is subject to all applicable laws of any governmental authority, including, without limitation, federal, state and foreign securities laws, tax laws, tariff and trade laws, ordinances, judgments, decrees, injunctions, writs and orders or like actions of any governmental authority and rules, regulations, orders, interpretations, licenses, and permits of any federal,
|
||||
regional, state, county, municipal or other governmental authority and you agree to comply with all such laws in your use of Prysm. The users of Prysm are solely responsible to determinate what, if any, taxes apply to their ETH transactions. The owners of, or contributors to, Prysm are not responsible for determining the taxes that apply to ETH transactions.
|
||||
PRYSM IS PROVIDED ON AN “AS-IS” BASIS AND MAY INCLUDE ERRORS, OMISSIONS, OR OTHER INACCURACIES. WITHOUT LIMITING ANYTHING SET FORTH ELSEWHERE IN THESE TERMS, OFFCHAIN LABS AND ITS CONTRIBUTORS MAKE NO REPRESENTATIONS OR WARRANTIES ABOUT PRYSM FOR ANY PURPOSE, AND HEREBY EXPRESSLY DISCLAIM ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT OR ANY OTHER IMPLIED WARRANTY UNDER THE UNIFORM COMPUTER INFORMATION TRANSACTIONS ACT AS ENACTED BY ANY STATE OR OTHER GOVERNMENTAL AUTHORITY. WE ALSO MAKE NO REPRESENTATIONS OR WARRANTIES THAT PRYSM WILL OPERATE ERROR-FREE, UNINTERRUPTED, OR IN A MANNER THAT WILL MEET YOUR REQUIREMENTS AND/OR NEEDS. THEREFORE, YOU ASSUME THE ENTIRE RISK REGARDING THE QUALITY AND/OR PERFORMANCE OF PRYSM AND ANY TRANSACTIONS ENTERED INTO THEREON.
|
||||
|
||||
### Miscellaneous
|
||||
These Terms will be construed and enforced in accordance with the laws of the state of Illinois as applied to agreements entered into and completely performed in Illinois. You agree to the personal jurisdiction by and venue in Illinois and waive any objection to such jurisdiction or venue.
|
||||
## Limitation of Liability
|
||||
|
||||
We reserve the right to revise these Terms, and your rights and obligations are at all times subject to the then-current Terms provided on Prysm. Your continued use of Prysm constitutes acceptance of such revised Terms.
|
||||
IN NO EVENT WILL OFFCHAIN LABS OR ANY OF ITS AFFILIATES OR ITS OR ANY SUCH AFFILIATE’S DIRECTORS, OFFICERS, EMPLOYEES, AGENTS, OR REPRESENTATIVES OR ANY CONTRIBUTORS (COLLECTIVELY, THE “OCL PARTIES”) BE LIABLE, WHETHER IN CONTRACT, WARRANTY, TORT (INCLUDING NEGLIGENCE, WHETHER ACTIVE, PASSIVE OR IMPUTED), PRODUCT LIABILITY, STRICT LIABILITY OR OTHER THEORY, BREACH OF STATUTORY DUTY OR OTHERWISE ARISING OUT OF, OR IN CONNECTION WITH, YOUR USE OF PRYSM, FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL OR CONSEQUENTIAL DAMAGES (INCLUDING ANY LOSS OF PROFITS OR DATA, BUSINESS INTERRUPTION OR OTHER PECUNIARY LOSS, OR DAMAGE, LOSS OR OTHER COMPROMISE OF DATA, IN EACH CASE WHETHER DIRECT, INDIRECT, INCIDENTAL, SPECIAL OR CONSEQUENTIAL) ARISING OUT OF USE PRYSM, EVEN IF WE OR OTHER USERS HAVE BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. The foregoing limitations and disclaimers shall apply to the maximum extent permitted by Applicable Law, even if any remedy fails of its essential purpose. You acknowledge and agree that the limitations of liability afforded us hereunder constitute a material and actual inducement and condition to entering into these Terms, and are reasonable, fair and equitable in scope to protect our legitimate interests in light of the fact that we are not receiving consideration from you for providing Prysm.
|
||||
|
||||
These Terms constitute the entire agreement between you and Prysmatic Labs regarding use of Prysm and will supersede all prior agreements whether, written or oral. No usage of trade or other regular practice or method of dealing between the parties will be used to modify, interpret, supplement, or alter the terms of these Terms.
|
||||
## Indemnification
|
||||
|
||||
To the maximum extent permitted by Applicable Law, you will defend, indemnify and hold each OCL Party harmless from and against any and all claims, actions, suits, investigations, or proceedings by any third party (including any party or purported party to or beneficiary or purported beneficiary of any transaction or other activity on Prysm), as well as any and all losses, liabilities, damages, costs, and expenses (including reasonable attorneys’ fees and costs) arising out of, accruing from, or in any way related to (i) your breach of the terms of this Agreement, (ii) any transaction, or the failure to occur of any transaction on Prysm, and (iii) your negligence, fraud, or willful misconduct.
|
||||
|
||||
## Compliance with Laws
|
||||
|
||||
Your use of Prysm is subject to all applicable laws of any governmental authority, including, without limitation, federal, state and foreign securities laws, tax laws, tariff and trade laws, ordinances, judgments, decrees, injunctions, writs and orders or like actions of any governmental authority and rules, regulations, orders, interpretations, licenses, and permits of any federal, regional, state, county, municipal or other governmental authority (collectively, “Applicable Law”) and you agree to comply with all such Applicable Law in your use of Prysm. The users of Prysm are solely responsible to determinate what, if any, taxes apply to their ETH transactions. The owners of, or contributors to, Prysm are not responsible for determining the taxes that apply to ETH transactions.
|
||||
|
||||
## Miscellaneous
|
||||
|
||||
These Terms will be governed by the laws of the State of Delaware without regard to its conflict of law provisions. With respect to any disputes or claims not subject to arbitration, as set forth in the OCL Terms of Use, you and Offchain Labs submit to the personal and exclusive jurisdiction of the state and federal courts located within New York, New York and waive any objection to such jurisdiction and venue. The failure of Offchain Labs to exercise or enforce any right or provision of these Terms will not constitute a waiver of such right or provision.
|
||||
We reserve the right to revise these Terms, and your rights and obligations are at all times subject to the then-current Terms provided on Prysm. Your use of Prysm following any such revision to these Terms constitutes acceptance of such revised Terms.
|
||||
|
||||
These Terms constitute the entire agreement between you and Offchain Labs regarding use of Prysm and will supersede all prior agreements whether, written or oral. No usage of trade or other regular practice or method of dealing between the parties will be used to modify, interpret, supplement, or alter the terms of these Terms.
|
||||
|
||||
If any portion of these Terms is held invalid or unenforceable, such invalidity or enforceability will not affect the other provisions of these Terms, which will remain in full force and effect, and the invalid or unenforceable portion will be given effect to the greatest extent possible. The failure of a party to require performance of any provision will not affect that party’s right to require performance at any time thereafter, nor will a waiver of any breach or default of these Terms or any provision of these Terms constitute a waiver of any subsequent breach or default or a waiver of the provision itself.
|
||||
|
||||
If any portion of these Terms is held invalid or unenforceable, such invalidity or enforceability will not affect the other provisions of these Terms, which will remain in full force and effect, and the invalid or unenforceable portion will be given effect to the greatest extent possible. The failure of a party to require performance of any provision will not affect that party’s right to require performance at any time thereafter, nor will a waiver of any breach or default of these Terms or any provision of these Terms constitute a waiver of any subsequent breach or default or a waiver of the provision itself.
|
||||
WORKSPACE (204 changes)
@@ -16,18 +16,36 @@ load("@rules_pkg//:deps.bzl", "rules_pkg_dependencies")
|
||||
|
||||
rules_pkg_dependencies()
|
||||
|
||||
HERMETIC_CC_TOOLCHAIN_VERSION = "v3.0.1"
|
||||
|
||||
http_archive(
|
||||
name = "hermetic_cc_toolchain",
|
||||
sha256 = "973ab22945b921ef45b8e1d6ce01ca7ce1b8a462167449a36e297438c4ec2755",
|
||||
strip_prefix = "hermetic_cc_toolchain-5098046bccc15d2962f3cc8e7e53d6a2a26072dc",
|
||||
sha256 = "3bc6ec127622fdceb4129cb06b6f7ab098c4d539124dde96a6318e7c32a53f7a",
|
||||
urls = [
|
||||
"https://github.com/uber/hermetic_cc_toolchain/archive/5098046bccc15d2962f3cc8e7e53d6a2a26072dc.tar.gz", # 2023-06-28
|
||||
"https://mirror.bazel.build/github.com/uber/hermetic_cc_toolchain/releases/download/{0}/hermetic_cc_toolchain-{0}.tar.gz".format(HERMETIC_CC_TOOLCHAIN_VERSION),
|
||||
"https://github.com/uber/hermetic_cc_toolchain/releases/download/{0}/hermetic_cc_toolchain-{0}.tar.gz".format(HERMETIC_CC_TOOLCHAIN_VERSION),
|
||||
],
|
||||
)
|
||||
|
||||
load("@hermetic_cc_toolchain//toolchain:defs.bzl", zig_toolchains = "toolchains")
|
||||
|
||||
zig_toolchains()
|
||||
# Temporarily use a nightly build until 0.12.0 is released.
|
||||
# See: https://github.com/prysmaticlabs/prysm/issues/13130
|
||||
zig_toolchains(
|
||||
host_platform_sha256 = {
|
||||
"linux-aarch64": "45afb8e32adde825165f4f293fcea9ecea503f7f9ec0e9bf4435afe70e67fb70",
|
||||
"linux-x86_64": "f136c6a8a0f6adcb057d73615fbcd6f88281b3593f7008d5f7ed514ff925c02e",
|
||||
"macos-aarch64": "05d995853c05243151deff47b60bdc2674f1e794a939eaeca0f42312da031cee",
|
||||
"macos-x86_64": "721754ba5a50f31e8a1f0e1a74cace26f8246576878ac4a8591b0ee7b6db1fc1",
|
||||
"windows-x86_64": "93f5248b2ea8c5ee8175e15b1384e133edc1cd49870b3ea259062a2e04164343",
|
||||
},
|
||||
url_formats = [
|
||||
"https://ziglang.org/builds/zig-{host_platform}-{version}.{_ext}",
|
||||
"https://mirror.bazel.build/ziglang.org/builds/zig-{host_platform}-{version}.{_ext}",
|
||||
"https://prysmaticlabs.com/mirror/ziglang.org/builds/zig-{host_platform}-{version}.{_ext}",
|
||||
],
|
||||
version = "0.12.0-dev.1349+fa022d1ec",
|
||||
)
|
||||
|
||||
# Register zig sdk toolchains with support for Ubuntu 20.04 (Focal Fossa) which has an EOL date of April, 2025.
|
||||
# For ubuntu glibc support, see https://launchpad.net/ubuntu/+source/glibc
|
||||
@@ -65,10 +83,10 @@ bazel_skylib_workspace()
|
||||
|
||||
http_archive(
|
||||
name = "bazel_gazelle",
|
||||
sha256 = "d3fa66a39028e97d76f9e2db8f1b0c11c099e8e01bf363a923074784e451f809",
|
||||
integrity = "sha256-MpOL2hbmcABjA1R5Bj2dJMYO2o15/Uc5Vj9Q0zHLMgk=",
|
||||
urls = [
|
||||
"https://mirror.bazel.build/github.com/bazelbuild/bazel-gazelle/releases/download/v0.33.0/bazel-gazelle-v0.33.0.tar.gz",
|
||||
"https://github.com/bazelbuild/bazel-gazelle/releases/download/v0.33.0/bazel-gazelle-v0.33.0.tar.gz",
|
||||
"https://mirror.bazel.build/github.com/bazelbuild/bazel-gazelle/releases/download/v0.35.0/bazel-gazelle-v0.35.0.tar.gz",
|
||||
"https://github.com/bazelbuild/bazel-gazelle/releases/download/v0.35.0/bazel-gazelle-v0.35.0.tar.gz",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -80,11 +98,36 @@ http_archive(
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "io_bazel_rules_docker",
|
||||
sha256 = "b1e80761a8a8243d03ebca8845e9cc1ba6c82ce7c5179ce2b295cd36f7e394bf",
|
||||
urls = ["https://github.com/bazelbuild/rules_docker/releases/download/v0.25.0/rules_docker-v0.25.0.tar.gz"],
|
||||
name = "rules_distroless",
|
||||
sha256 = "e64f06e452cd153aeab81f752ccf4642955b3af319e64f7bc7a7c9252f76b10e",
|
||||
strip_prefix = "rules_distroless-f5e678217b57ce3ad2f1c0204bd4e9d416255773",
|
||||
url = "https://github.com/GoogleContainerTools/rules_distroless/archive/f5e678217b57ce3ad2f1c0204bd4e9d416255773.tar.gz",
|
||||
)
|
||||
|
||||
load("@rules_distroless//distroless:dependencies.bzl", "rules_distroless_dependencies")
|
||||
|
||||
rules_distroless_dependencies()
|
||||
|
||||
http_archive(
|
||||
name = "distroless",
|
||||
integrity = "sha256-Cf00kUp1NyXA3LzbdyYy4Kda27wbkB8+A9MliTxq4jE=",
|
||||
strip_prefix = "distroless-9dc924b9fe812eec2fa0061824dcad39eb09d0d6",
|
||||
url = "https://github.com/GoogleContainerTools/distroless/archive/9dc924b9fe812eec2fa0061824dcad39eb09d0d6.tar.gz", # 2024-01-24
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "aspect_bazel_lib",
|
||||
sha256 = "f5ea76682b209cc0bd90d0f5a3b26d2f7a6a2885f0c5f615e72913f4805dbb0d",
|
||||
strip_prefix = "bazel-lib-2.5.0",
|
||||
url = "https://github.com/aspect-build/bazel-lib/releases/download/v2.5.0/bazel-lib-v2.5.0.tar.gz",
|
||||
)
|
||||
|
||||
load("@aspect_bazel_lib//lib:repositories.bzl", "aspect_bazel_lib_dependencies", "aspect_bazel_lib_register_toolchains")
|
||||
|
||||
aspect_bazel_lib_dependencies()
|
||||
|
||||
aspect_bazel_lib_register_toolchains()
|
||||
|
||||
http_archive(
|
||||
name = "rules_oci",
|
||||
sha256 = "c71c25ed333a4909d2dd77e0b16c39e9912525a98c7fa85144282be8d04ef54c",
|
||||
@@ -110,10 +153,10 @@ http_archive(
|
||||
# Expose internals of go_test for custom build transitions.
|
||||
"//third_party:io_bazel_rules_go_test.patch",
|
||||
],
|
||||
sha256 = "91585017debb61982f7054c9688857a2ad1fd823fc3f9cb05048b0025c47d023",
|
||||
sha256 = "80a98277ad1311dacd837f9b16db62887702e9f1d1c4c9f796d0121a46c8e184",
|
||||
urls = [
|
||||
"https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v0.42.0/rules_go-v0.42.0.zip",
|
||||
"https://github.com/bazelbuild/rules_go/releases/download/v0.42.0/rules_go-v0.42.0.zip",
|
||||
"https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v0.46.0/rules_go-v0.46.0.zip",
|
||||
"https://github.com/bazelbuild/rules_go/releases/download/v0.46.0/rules_go-v0.46.0.zip",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -132,67 +175,16 @@ git_repository(
|
||||
# gazelle args: -go_prefix github.com/gogo/protobuf -proto legacy
|
||||
)
|
||||
|
||||
load(
|
||||
"@io_bazel_rules_docker//repositories:repositories.bzl",
|
||||
container_repositories = "repositories",
|
||||
)
|
||||
|
||||
container_repositories()
|
||||
|
||||
load(
|
||||
"@io_bazel_rules_docker//container:container.bzl",
|
||||
"container_pull",
|
||||
)
|
||||
|
||||
# Pulled gcr.io/distroless/cc-debian11:latest on 2022-02-23
|
||||
container_pull(
|
||||
name = "cc_image_base_amd64",
|
||||
digest = "sha256:2a0daf90a7deb78465bfca3ef2eee6e91ce0a5706059f05d79d799a51d339523",
|
||||
registry = "gcr.io",
|
||||
repository = "distroless/cc-debian11",
|
||||
)
|
||||
|
||||
# Pulled gcr.io/distroless/cc-debian11:debug on 2022-02-23
|
||||
container_pull(
|
||||
name = "cc_debug_image_base_amd64",
|
||||
digest = "sha256:7bd596f5f200588f13a69c268eea6ce428b222b67cd7428d6a7fef95e75c052a",
|
||||
registry = "gcr.io",
|
||||
repository = "distroless/cc-debian11",
|
||||
)
|
||||
|
||||
# Pulled from gcr.io/distroless/base-debian11:latest on 2022-02-23
|
||||
container_pull(
|
||||
name = "go_image_base_amd64",
|
||||
digest = "sha256:34e682800774ecbd0954b1663d90238505f1ba5543692dbc75feef7dd4839e90",
|
||||
registry = "gcr.io",
|
||||
repository = "distroless/base-debian11",
|
||||
)
|
||||
|
||||
# Pulled from gcr.io/distroless/base-debian11:debug on 2022-02-23
|
||||
container_pull(
|
||||
name = "go_debug_image_base_amd64",
|
||||
digest = "sha256:0f503c6bfd207793bc416f20a35bf6b75d769a903c48f180ad73f60f7b60d7bd",
|
||||
registry = "gcr.io",
|
||||
repository = "distroless/base-debian11",
|
||||
)
|
||||
|
||||
container_pull(
|
||||
name = "alpine_cc_linux_amd64",
|
||||
digest = "sha256:752aa0c9a88461ffc50c5267bb7497ef03a303e38b2c8f7f2ded9bebe5f1f00e",
|
||||
registry = "index.docker.io",
|
||||
repository = "pinglamb/alpine-glibc",
|
||||
)
|
||||
|
||||
load("@rules_oci//oci:pull.bzl", "oci_pull")
|
||||
|
||||
# A multi-arch base image
|
||||
oci_pull(
|
||||
name = "linux_debian11_multiarch_base", # Debian bullseye
|
||||
digest = "sha256:9b8e0854865dcaf49470b4ec305df45957020fbcf17b71eeb50ffd3bc5bf885d", # 2023-05-17
|
||||
digest = "sha256:b82f113425c5b5c714151aaacd8039bc141821cdcd3c65202d42bdf9c43ae60b", # 2023-12-12
|
||||
image = "gcr.io/distroless/cc-debian11",
|
||||
platforms = [
|
||||
"linux/amd64",
|
||||
"linux/arm64",
|
||||
"linux/arm64/v8",
|
||||
],
|
||||
reproducible = True,
|
||||
)
|
||||
@@ -206,10 +198,14 @@ load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_depe
|
||||
go_rules_dependencies()
|
||||
|
||||
go_register_toolchains(
|
||||
go_version = "1.20.10",
|
||||
go_version = "1.21.8",
|
||||
nogo = "@//:nogo",
|
||||
)
|
||||
|
||||
load("//:distroless_deps.bzl", "distroless_deps")
|
||||
|
||||
distroless_deps()
|
||||
|
||||
http_archive(
|
||||
name = "io_kubernetes_build",
|
||||
sha256 = "b84fbd1173acee9d02a7d3698ad269fdf4f7aa081e9cecd40e012ad0ad8cfa2a",
|
||||
@@ -228,8 +224,8 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "91434d5fd5e1c6eb7b0174fed2afe25e09bddf00e1e4c431db931b2cee4e7773",
|
||||
url = "https://github.com/eth-clients/slashing-protection-interchange-tests/archive/b8413ca42dc92308019d0d4db52c87e9e125c4e9.tar.gz",
|
||||
sha256 = "516d551cfb3e50e4ac2f42db0992f4ceb573a7cb1616d727a725c8161485329f",
|
||||
url = "https://github.com/eth-clients/slashing-protection-interchange-tests/archive/refs/tags/v5.3.0.tar.gz",
|
||||
)
|
||||
|
||||
http_archive(
|
||||
@@ -247,7 +243,9 @@ filegroup(
|
||||
url = "https://github.com/ethereum/EIPs/archive/5480440fe51742ed23342b68cf106cefd427e39d.tar.gz",
|
||||
)
|
||||
|
||||
consensus_spec_version = "v1.4.0-beta.3"
|
||||
consensus_spec_version = "v1.4.0-beta.7"
|
||||
|
||||
consensus_spec_test_version = "v1.4.0-beta.7-hotfix"
|
||||
|
||||
bls_test_version = "v0.1.1"
|
||||
|
||||
@@ -263,8 +261,8 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "67ae5b8fc368853da23d4297e480a4b7f4722fb970d1c7e2b6a5b7faef9cb907",
|
||||
url = "https://github.com/ethereum/consensus-spec-tests/releases/download/%s/general.tar.gz" % consensus_spec_version,
|
||||
sha256 = "c282c0f86f23f3d2e0f71f5975769a4077e62a7e3c7382a16bd26a7e589811a0",
|
||||
url = "https://github.com/ethereum/consensus-spec-tests/releases/download/%s/general.tar.gz" % consensus_spec_test_version,
|
||||
)
|
||||
|
||||
http_archive(
|
||||
@@ -279,8 +277,8 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "82474f29fff4abd09fb1e71bafa98827e2573cf0ad02cf119610961831dc3bb5",
|
||||
url = "https://github.com/ethereum/consensus-spec-tests/releases/download/%s/minimal.tar.gz" % consensus_spec_version,
|
||||
sha256 = "4649c35aa3b8eb0cfdc81bee7c05649f90ef36bede5b0513e1f2e8baf37d6033",
|
||||
url = "https://github.com/ethereum/consensus-spec-tests/releases/download/%s/minimal.tar.gz" % consensus_spec_test_version,
|
||||
)
|
||||
|
||||
http_archive(
|
||||
@@ -295,8 +293,8 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "60e4b6eb6c341daab7ee5614a8e3f28567247c504c593b951bfe919622c8ef8f",
|
||||
url = "https://github.com/ethereum/consensus-spec-tests/releases/download/%s/mainnet.tar.gz" % consensus_spec_version,
|
||||
sha256 = "c5a03f724f757456ffaabd2a899992a71d2baf45ee4db65ca3518f2b7ee928c8",
|
||||
url = "https://github.com/ethereum/consensus-spec-tests/releases/download/%s/mainnet.tar.gz" % consensus_spec_test_version,
|
||||
)
|
||||
|
||||
http_archive(
|
||||
@@ -310,7 +308,7 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "fdab9756c93a250219ff6a10d5a9faee1e2e6878a14508410409e307362c6991",
|
||||
sha256 = "049c29267310e6b88280f4f834a75866c2f5b9036fa97acb9d9c6db8f64d9118",
|
||||
strip_prefix = "consensus-specs-" + consensus_spec_version[1:],
|
||||
url = "https://github.com/ethereum/consensus-specs/archive/refs/tags/%s.tar.gz" % consensus_spec_version,
|
||||
)
|
||||
@@ -341,9 +339,25 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "2701e1e1a3ec10c673fe7dbdbbe6f02c8ae8c922aebbf6e720d8c72d5458aafe",
|
||||
strip_prefix = "eth2-networks-7b4897888cebef23801540236f73123e21774954",
|
||||
url = "https://github.com/eth-clients/eth2-networks/archive/7b4897888cebef23801540236f73123e21774954.tar.gz",
|
||||
sha256 = "77e7e3ed65e33b7bb19d30131f4c2bb39e4dfeb188ab9ae84651c3cc7600131d",
|
||||
strip_prefix = "eth2-networks-934c948e69205dcf2deb87e4ae6cc140c335f94d",
|
||||
url = "https://github.com/eth-clients/eth2-networks/archive/934c948e69205dcf2deb87e4ae6cc140c335f94d.tar.gz",
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "goerli_testnet",
|
||||
build_file_content = """
|
||||
filegroup(
|
||||
name = "configs",
|
||||
srcs = [
|
||||
"prater/config.yaml",
|
||||
],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "43fc0f55ddff7b511713e2de07aa22846a67432df997296fb4fc09cd8ed1dcdb",
|
||||
strip_prefix = "goerli-6522ac6684693740cd4ddcc2a0662e03702aa4a1",
|
||||
url = "https://github.com/eth-clients/goerli/archive/6522ac6684693740cd4ddcc2a0662e03702aa4a1.tar.gz",
|
||||
)
|
||||
|
||||
http_archive(
|
||||
@@ -357,17 +371,17 @@ filegroup(
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
sha256 = "9f66d8d5644982d3d0d2e3d2b9ebe77a5f96638a5d7fcd715599c32818195cb3",
|
||||
strip_prefix = "holesky-ea39b9006210848e13f28d92e12a30548cecd41d",
|
||||
url = "https://github.com/eth-clients/holesky/archive/ea39b9006210848e13f28d92e12a30548cecd41d.tar.gz", # 2023-09-21
|
||||
sha256 = "5f4be6fd088683ea9db45c863b9c5a1884422449e5b59fd2d561d3ba0f73ffd9",
|
||||
strip_prefix = "holesky-9d9aabf2d4de51334ee5fed6c79a4d55097d1a43",
|
||||
url = "https://github.com/eth-clients/holesky/archive/9d9aabf2d4de51334ee5fed6c79a4d55097d1a43.tar.gz", # 2024-01-22
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "com_google_protobuf",
|
||||
sha256 = "4e176116949be52b0408dfd24f8925d1eb674a781ae242a75296b17a1c721395",
|
||||
strip_prefix = "protobuf-23.3",
|
||||
sha256 = "9bd87b8280ef720d3240514f884e56a712f2218f0d693b48050c836028940a42",
|
||||
strip_prefix = "protobuf-25.1",
|
||||
urls = [
|
||||
"https://github.com/protocolbuffers/protobuf/archive/v23.3.tar.gz",
|
||||
"https://github.com/protocolbuffers/protobuf/archive/v25.1.tar.gz",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -401,24 +415,6 @@ load("@prysm//testing/endtoend:deps.bzl", "e2e_deps")
|
||||
|
||||
e2e_deps()
|
||||
|
||||
load(
|
||||
"@io_bazel_rules_docker//go:image.bzl",
|
||||
_go_image_repos = "repositories",
|
||||
)
|
||||
|
||||
# Golang images
|
||||
# This is using gcr.io/distroless/base
|
||||
_go_image_repos()
|
||||
|
||||
# CC images
|
||||
# This is using gcr.io/distroless/base
|
||||
load(
|
||||
"@io_bazel_rules_docker//cc:image.bzl",
|
||||
_cc_image_repos = "repositories",
|
||||
)
|
||||
|
||||
_cc_image_repos()
|
||||
|
||||
load("@com_github_atlassian_bazel_tools//gometalinter:deps.bzl", "gometalinter_dependencies")
|
||||
|
||||
gometalinter_dependencies()
|
||||
|
||||
@@ -2,7 +2,10 @@ load("@prysm//tools/go:def.bzl", "go_library")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = ["headers.go"],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v4/api",
|
||||
srcs = [
|
||||
"constants.go",
|
||||
"headers.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api",
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
@@ -7,7 +7,7 @@ go_library(
|
||||
"errors.go",
|
||||
"options.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v4/api/client",
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = ["@com_github_pkg_errors//:go_default_library"],
|
||||
)
|
||||
|
||||
@@ -6,15 +6,17 @@ go_library(
|
||||
"checkpoint.go",
|
||||
"client.go",
|
||||
"doc.go",
|
||||
"health.go",
|
||||
"log.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v4/api/client/beacon",
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//api/client:go_default_library",
|
||||
"//api/client/beacon/iface:go_default_library",
|
||||
"//api/server:go_default_library",
|
||||
"//api/server/structs:go_default_library",
|
||||
"//beacon-chain/core/helpers:go_default_library",
|
||||
"//beacon-chain/rpc/apimiddleware:go_default_library",
|
||||
"//beacon-chain/rpc/eth/beacon:go_default_library",
|
||||
"//beacon-chain/rpc/eth/shared:go_default_library",
|
||||
"//beacon-chain/state:go_default_library",
|
||||
"//consensus-types/interfaces:go_default_library",
|
||||
"//consensus-types/primitives:go_default_library",
|
||||
@@ -22,7 +24,6 @@ go_library(
|
||||
"//encoding/ssz/detect:go_default_library",
|
||||
"//io/file:go_default_library",
|
||||
"//network/forks:go_default_library",
|
||||
"//proto/eth/v1:go_default_library",
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"//runtime/version:go_default_library",
|
||||
"//time/slots:go_default_library",
|
||||
@@ -38,10 +39,12 @@ go_test(
|
||||
srcs = [
|
||||
"checkpoint_test.go",
|
||||
"client_test.go",
|
||||
"health_test.go",
|
||||
],
|
||||
embed = [":go_default_library"],
|
||||
deps = [
|
||||
"//api/client:go_default_library",
|
||||
"//api/client/beacon/testing:go_default_library",
|
||||
"//beacon-chain/state:go_default_library",
|
||||
"//config/params:go_default_library",
|
||||
"//consensus-types/blocks:go_default_library",
|
||||
@@ -55,5 +58,6 @@ go_test(
|
||||
"//testing/util:go_default_library",
|
||||
"//time/slots:go_default_library",
|
||||
"@com_github_pkg_errors//:go_default_library",
|
||||
"@org_uber_go_mock//gomock:go_default_library",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -7,17 +7,17 @@ import (
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/pkg/errors"
|
||||
base "github.com/prysmaticlabs/prysm/v4/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v4/beacon-chain/core/helpers"
|
||||
"github.com/prysmaticlabs/prysm/v4/beacon-chain/state"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/interfaces"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v4/encoding/bytesutil"
|
||||
"github.com/prysmaticlabs/prysm/v4/encoding/ssz/detect"
|
||||
"github.com/prysmaticlabs/prysm/v4/io/file"
|
||||
"github.com/prysmaticlabs/prysm/v4/runtime/version"
|
||||
"github.com/prysmaticlabs/prysm/v4/time/slots"
|
||||
log "github.com/sirupsen/logrus"
|
||||
base "github.com/prysmaticlabs/prysm/v5/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v5/beacon-chain/core/helpers"
|
||||
"github.com/prysmaticlabs/prysm/v5/beacon-chain/state"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/interfaces"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/bytesutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/ssz/detect"
|
||||
"github.com/prysmaticlabs/prysm/v5/io/file"
|
||||
"github.com/prysmaticlabs/prysm/v5/runtime/version"
|
||||
"github.com/prysmaticlabs/prysm/v5/time/slots"
|
||||
"github.com/sirupsen/logrus"
|
||||
"golang.org/x/mod/semver"
|
||||
)
|
||||
|
||||
@@ -74,7 +74,12 @@ func DownloadFinalizedData(ctx context.Context, client *Client) (*OriginData, er
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error detecting chain config for finalized state")
|
||||
}
|
||||
log.Printf("detected supported config in remote finalized state, name=%s, fork=%s", vu.Config.ConfigName, version.String(vu.Fork))
|
||||
|
||||
log.WithFields(logrus.Fields{
|
||||
"name": vu.Config.ConfigName,
|
||||
"fork": version.String(vu.Fork),
|
||||
}).Info("Detected supported config in remote finalized state")
|
||||
|
||||
s, err := vu.UnmarshalBeaconState(sb)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error unmarshaling finalized state to correct version")
|
||||
@@ -108,10 +113,10 @@ func DownloadFinalizedData(ctx context.Context, client *Client) (*OriginData, er
|
||||
}
|
||||
|
||||
log.
|
||||
WithField("block_slot", b.Block().Slot()).
|
||||
WithField("state_slot", s.Slot()).
|
||||
WithField("state_root", hexutil.Encode(sr[:])).
|
||||
WithField("block_root", hexutil.Encode(br[:])).
|
||||
WithField("blockSlot", b.Block().Slot()).
|
||||
WithField("stateSlot", s.Slot()).
|
||||
WithField("stateRoot", hexutil.Encode(sr[:])).
|
||||
WithField("blockRoot", hexutil.Encode(br[:])).
|
||||
Info("Downloaded checkpoint sync state and block.")
|
||||
return &OriginData{
|
||||
st: s,
|
||||
|
||||
@@ -9,22 +9,22 @@ import (
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v4/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v4/beacon-chain/state"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/blocks"
|
||||
blocktest "github.com/prysmaticlabs/prysm/v4/consensus-types/blocks/testing"
|
||||
"github.com/prysmaticlabs/prysm/v4/network/forks"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v4/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/util"
|
||||
"github.com/prysmaticlabs/prysm/v4/time/slots"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v5/beacon-chain/state"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/blocks"
|
||||
blocktest "github.com/prysmaticlabs/prysm/v5/consensus-types/blocks/testing"
|
||||
"github.com/prysmaticlabs/prysm/v5/network/forks"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/util"
|
||||
"github.com/prysmaticlabs/prysm/v5/time/slots"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v4/config/params"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v4/encoding/ssz/detect"
|
||||
"github.com/prysmaticlabs/prysm/v4/runtime/version"
|
||||
"github.com/prysmaticlabs/prysm/v5/config/params"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/ssz/detect"
|
||||
"github.com/prysmaticlabs/prysm/v5/runtime/version"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
)
|
||||
|
||||
type testRT struct {
|
||||
|
||||
@@ -13,26 +13,23 @@ import (
|
||||
"strconv"
|
||||
"text/template"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v4/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v4/beacon-chain/rpc/eth/beacon"
|
||||
"github.com/prysmaticlabs/prysm/v4/beacon-chain/rpc/eth/shared"
|
||||
"github.com/prysmaticlabs/prysm/v4/network/forks"
|
||||
v1 "github.com/prysmaticlabs/prysm/v4/proto/eth/v1"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v4/beacon-chain/rpc/apimiddleware"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v4/encoding/bytesutil"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v4/proto/prysm/v1alpha1"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/bytesutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/network/forks"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
const (
|
||||
getSignedBlockPath = "/eth/v2/beacon/blocks"
|
||||
getBlockRootPath = "/eth/v1/beacon/blocks/{{.Id}}/root"
|
||||
getForkForStatePath = "/eth/v1/beacon/states/{{.Id}}/fork"
|
||||
getWeakSubjectivityPath = "/eth/v1/beacon/weak_subjectivity"
|
||||
getWeakSubjectivityPath = "/prysm/v1/beacon/weak_subjectivity"
|
||||
getForkSchedulePath = "/eth/v1/config/fork_schedule"
|
||||
getConfigSpecPath = "/eth/v1/config/spec"
|
||||
getStatePath = "/eth/v2/debug/beacon/states"
|
||||
@@ -150,8 +147,8 @@ func (c *Client) GetFork(ctx context.Context, stateId StateOrBlockId) (*ethpb.Fo
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "error requesting fork by state id = %s", stateId)
|
||||
}
|
||||
fr := &shared.Fork{}
|
||||
dataWrapper := &struct{ Data *shared.Fork }{Data: fr}
|
||||
fr := &structs.Fork{}
|
||||
dataWrapper := &struct{ Data *structs.Fork }{Data: fr}
|
||||
err = json.Unmarshal(body, dataWrapper)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error decoding json response in GetFork")
|
||||
@@ -179,12 +176,12 @@ func (c *Client) GetForkSchedule(ctx context.Context) (forks.OrderedSchedule, er
|
||||
}
|
||||
|
||||
// GetConfigSpec retrieve the current configs of the network used by the beacon node.
|
||||
func (c *Client) GetConfigSpec(ctx context.Context) (*v1.SpecResponse, error) {
|
||||
func (c *Client) GetConfigSpec(ctx context.Context) (*structs.GetSpecResponse, error) {
|
||||
body, err := c.Get(ctx, getConfigSpecPath)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error requesting configSpecPath")
|
||||
}
|
||||
fsr := &v1.SpecResponse{}
|
||||
fsr := &structs.GetSpecResponse{}
|
||||
err = json.Unmarshal(body, fsr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -259,16 +256,16 @@ func (c *Client) GetWeakSubjectivity(ctx context.Context) (*WeakSubjectivityData
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
v := &apimiddleware.WeakSubjectivityResponse{}
|
||||
v := &structs.GetWeakSubjectivityResponse{}
|
||||
err = json.Unmarshal(body, v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
epoch, err := strconv.ParseUint(v.Data.Checkpoint.Epoch, 10, 64)
|
||||
epoch, err := strconv.ParseUint(v.Data.WsCheckpoint.Epoch, 10, 64)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
blockRoot, err := hexutil.Decode(v.Data.Checkpoint.Root)
|
||||
blockRoot, err := hexutil.Decode(v.Data.WsCheckpoint.Root)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -285,7 +282,7 @@ func (c *Client) GetWeakSubjectivity(ctx context.Context) (*WeakSubjectivityData
|
||||
|
||||
// SubmitChangeBLStoExecution calls a beacon API endpoint to set the withdrawal addresses based on the given signed messages.
|
||||
// If the API responds with something other than OK there will be failure messages associated to the corresponding request message.
|
||||
func (c *Client) SubmitChangeBLStoExecution(ctx context.Context, request []*shared.SignedBLSToExecutionChange) error {
|
||||
func (c *Client) SubmitChangeBLStoExecution(ctx context.Context, request []*structs.SignedBLSToExecutionChange) error {
|
||||
u := c.BaseURL().ResolveReference(&url.URL{Path: changeBLStoExecutionPath})
|
||||
body, err := json.Marshal(request)
|
||||
if err != nil {
|
||||
@@ -306,15 +303,15 @@ func (c *Client) SubmitChangeBLStoExecution(ctx context.Context, request []*shar
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
decoder := json.NewDecoder(resp.Body)
|
||||
decoder.DisallowUnknownFields()
|
||||
errorJson := &apimiddleware.IndexedVerificationFailureErrorJson{}
|
||||
errorJson := &server.IndexedVerificationFailureError{}
|
||||
if err := decoder.Decode(errorJson); err != nil {
|
||||
return errors.Wrapf(err, "failed to decode error JSON for %s", resp.Request.URL)
|
||||
}
|
||||
for _, failure := range errorJson.Failures {
|
||||
w := request[failure.Index].Message
|
||||
log.WithFields(log.Fields{
|
||||
"validator_index": w.ValidatorIndex,
|
||||
"withdrawal_address": w.ToExecutionAddress,
|
||||
log.WithFields(logrus.Fields{
|
||||
"validatorIndex": w.ValidatorIndex,
|
||||
"withdrawalAddress": w.ToExecutionAddress,
|
||||
}).Error(failure.Message)
|
||||
}
|
||||
return errors.Errorf("POST error %d: %s", errorJson.Code, errorJson.Message)
|
||||
@@ -324,12 +321,12 @@ func (c *Client) SubmitChangeBLStoExecution(ctx context.Context, request []*shar
|
||||
|
||||
// GetBLStoExecutionChanges gets all the set withdrawal messages in the node's operation pool.
|
||||
// Returns a struct representation of json response.
|
||||
func (c *Client) GetBLStoExecutionChanges(ctx context.Context) (*beacon.BLSToExecutionChangesPoolResponse, error) {
|
||||
func (c *Client) GetBLStoExecutionChanges(ctx context.Context) (*structs.BLSToExecutionChangesPoolResponse, error) {
|
||||
body, err := c.Get(ctx, changeBLStoExecutionPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
poolResponse := &beacon.BLSToExecutionChangesPoolResponse{}
|
||||
poolResponse := &structs.BLSToExecutionChangesPoolResponse{}
|
||||
err = json.Unmarshal(body, poolResponse)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -338,15 +335,15 @@ func (c *Client) GetBLStoExecutionChanges(ctx context.Context) (*beacon.BLSToExe
|
||||
}
|
||||
|
||||
type forkScheduleResponse struct {
|
||||
Data []shared.Fork
|
||||
Data []structs.Fork
|
||||
}
|
||||
|
||||
func (fsr *forkScheduleResponse) OrderedForkSchedule() (forks.OrderedSchedule, error) {
|
||||
ofs := make(forks.OrderedSchedule, 0)
|
||||
for _, d := range fsr.Data {
|
||||
epoch, err := strconv.Atoi(d.Epoch)
|
||||
epoch, err := strconv.ParseUint(d.Epoch, 10, 64)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, errors.Wrapf(err, "error parsing epoch %s", d.Epoch)
|
||||
}
|
||||
vSlice, err := hexutil.Decode(d.CurrentVersion)
|
||||
if err != nil {
|
||||
@@ -358,7 +355,7 @@ func (fsr *forkScheduleResponse) OrderedForkSchedule() (forks.OrderedSchedule, e
|
||||
version := bytesutil.ToBytes4(vSlice)
|
||||
ofs = append(ofs, forks.ForkScheduleEntry{
|
||||
Version: version,
|
||||
Epoch: primitives.Epoch(uint64(epoch)),
|
||||
Epoch: primitives.Epoch(epoch),
|
||||
})
|
||||
}
|
||||
sort.Sort(ofs)
|
||||
|
||||
@@ -4,8 +4,8 @@ import (
|
||||
"net/url"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v4/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
)
|
||||
|
||||
func TestParseNodeVersion(t *testing.T) {
|
||||
|
||||
api/client/beacon/health.go (new file, 55 lines)
@@ -0,0 +1,55 @@
package beacon

import (
    "context"
    "sync"

    "github.com/prysmaticlabs/prysm/v5/api/client/beacon/iface"
)

type NodeHealthTracker struct {
    isHealthy  *bool
    healthChan chan bool
    node       iface.HealthNode
    sync.RWMutex
}

func NewNodeHealthTracker(node iface.HealthNode) *NodeHealthTracker {
    return &NodeHealthTracker{
        node:       node,
        healthChan: make(chan bool, 1),
    }
}

// HealthUpdates provides a read-only channel for health updates.
func (n *NodeHealthTracker) HealthUpdates() <-chan bool {
    return n.healthChan
}

func (n *NodeHealthTracker) IsHealthy() bool {
    n.RLock()
    defer n.RUnlock()
    if n.isHealthy == nil {
        return false
    }
    return *n.isHealthy
}

func (n *NodeHealthTracker) CheckHealth(ctx context.Context) bool {
    n.RLock()
    newStatus := n.node.IsHealthy(ctx)
    if n.isHealthy == nil {
        n.isHealthy = &newStatus
    }
    isStatusChanged := newStatus != *n.isHealthy
    n.RUnlock()

    if isStatusChanged {
        n.Lock()
        // Double-check the condition to ensure it hasn't changed since the first check.
        n.isHealthy = &newStatus
        n.Unlock() // It's better to unlock as soon as the protected section is over.
        n.healthChan <- newStatus
    }
    return newStatus
}
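The NodeHealthTracker above caches the last observed health state, pushes transitions onto a buffered channel, and only notifies when the status actually flips. Below is a minimal sketch of how a caller might drive it; the connectivityChecker type, the watch function, and the polling interval are illustrative assumptions, not part of this change.

```go
package example

import (
    "context"
    "time"

    "github.com/prysmaticlabs/prysm/v5/api/client/beacon"
    "github.com/prysmaticlabs/prysm/v5/api/client/beacon/iface"
)

// connectivityChecker is a hypothetical iface.HealthNode implementation used
// only for this sketch; any type with an IsHealthy(ctx) bool method qualifies.
type connectivityChecker struct{}

func (connectivityChecker) IsHealthy(ctx context.Context) bool {
    // A real checker would ping the beacon node's health endpoint here.
    return true
}

// watch polls the node and reacts to health transitions.
func watch(ctx context.Context) {
    var node iface.HealthNode = connectivityChecker{}
    tracker := beacon.NewNodeHealthTracker(node)

    // Consume transition events; CheckHealth only sends when the status flips.
    go func() {
        for healthy := range tracker.HealthUpdates() {
            _ = healthy // e.g. fail over to a backup node when false
        }
    }()

    ticker := time.NewTicker(10 * time.Second) // assumed polling interval
    defer ticker.Stop()
    for {
        select {
        case <-ctx.Done():
            return
        case <-ticker.C:
            tracker.CheckHealth(ctx)
        }
    }
}
```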
api/client/beacon/health_test.go (new file, 118 lines)
@@ -0,0 +1,118 @@
package beacon

import (
    "context"
    "sync"
    "testing"

    healthTesting "github.com/prysmaticlabs/prysm/v5/api/client/beacon/testing"
    "go.uber.org/mock/gomock"
)

func TestNodeHealth_IsHealthy(t *testing.T) {
    tests := []struct {
        name      string
        isHealthy bool
        want      bool
    }{
        {"initially healthy", true, true},
        {"initially unhealthy", false, false},
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            n := &NodeHealthTracker{
                isHealthy:  &tt.isHealthy,
                healthChan: make(chan bool, 1),
            }
            if got := n.IsHealthy(); got != tt.want {
                t.Errorf("IsHealthy() = %v, want %v", got, tt.want)
            }
        })
    }
}

func TestNodeHealth_UpdateNodeHealth(t *testing.T) {
    tests := []struct {
        name       string
        initial    bool // Initial health status
        newStatus  bool // Status to update to
        shouldSend bool // Should a message be sent through the channel
    }{
        {"healthy to unhealthy", true, false, true},
        {"unhealthy to healthy", false, true, true},
        {"remain healthy", true, true, false},
        {"remain unhealthy", false, false, false},
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            ctrl := gomock.NewController(t)
            defer ctrl.Finish()
            client := healthTesting.NewMockHealthClient(ctrl)
            client.EXPECT().IsHealthy(gomock.Any()).Return(tt.newStatus)
            n := &NodeHealthTracker{
                isHealthy:  &tt.initial,
                node:       client,
                healthChan: make(chan bool, 1),
            }

            s := n.CheckHealth(context.Background())
            // Check if health status was updated
            if s != tt.newStatus {
                t.Errorf("UpdateNodeHealth() failed to update isHealthy from %v to %v", tt.initial, tt.newStatus)
            }

            select {
            case status := <-n.HealthUpdates():
                if !tt.shouldSend {
                    t.Errorf("UpdateNodeHealth() unexpectedly sent status %v to HealthCh", status)
                } else if status != tt.newStatus {
                    t.Errorf("UpdateNodeHealth() sent wrong status %v, want %v", status, tt.newStatus)
                }
            default:
                if tt.shouldSend {
                    t.Error("UpdateNodeHealth() did not send any status to HealthCh when expected")
                }
            }
        })
    }
}

func TestNodeHealth_Concurrency(t *testing.T) {
    ctrl := gomock.NewController(t)
    defer ctrl.Finish()
    client := healthTesting.NewMockHealthClient(ctrl)
    n := NewNodeHealthTracker(client)
    var wg sync.WaitGroup

    // Number of goroutines to spawn for both reading and writing
    numGoroutines := 6

    go func() {
        for range n.HealthUpdates() {
            // Consume values to avoid blocking on channel send.
        }
    }()

    wg.Add(numGoroutines * 2) // for readers and writers

    // Concurrently update health status
    for i := 0; i < numGoroutines; i++ {
        go func() {
            defer wg.Done()
            client.EXPECT().IsHealthy(gomock.Any()).Return(false)
            n.CheckHealth(context.Background())
            client.EXPECT().IsHealthy(gomock.Any()).Return(true)
            n.CheckHealth(context.Background())
        }()
    }

    // Concurrently read health status
    for i := 0; i < numGoroutines; i++ {
        go func() {
            defer wg.Done()
            _ = n.IsHealthy() // Just read the value
        }()
    }

    wg.Wait() // Wait for all goroutines to finish
}
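TestNodeHealth_Concurrency exercises CheckHealth and IsHealthy from several goroutines at once; a test like this is normally run with the race detector enabled (for example `go test -race ./api/client/beacon/...`) so that any locking mistakes in the tracker surface as race reports.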
api/client/beacon/iface/BUILD.bazel (new file, 8 lines)
@@ -0,0 +1,8 @@
load("@prysm//tools/go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = ["health.go"],
    importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon/iface",
    visibility = ["//visibility:public"],
)
api/client/beacon/iface/health.go (new file, 13 lines)
@@ -0,0 +1,13 @@
package iface

import "context"

type HealthTracker interface {
    HealthUpdates() <-chan bool
    IsHealthy() bool
    CheckHealth(ctx context.Context) bool
}

type HealthNode interface {
    IsHealthy(ctx context.Context) bool
}
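Taken together with health.go above, these interfaces split the roles: HealthNode is the thing being probed, HealthTracker is the thing consumers watch. A compile-time assertion such as the following (illustrative only, not part of the diff) makes the intended relationship explicit:

```go
package beacon_test

import (
    "github.com/prysmaticlabs/prysm/v5/api/client/beacon"
    "github.com/prysmaticlabs/prysm/v5/api/client/beacon/iface"
)

// *NodeHealthTracker provides HealthUpdates, IsHealthy and CheckHealth,
// so it satisfies iface.HealthTracker.
var _ iface.HealthTracker = (*beacon.NodeHealthTracker)(nil)
```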
api/client/beacon/log.go (new file, 5 lines)
@@ -0,0 +1,5 @@
package beacon

import "github.com/sirupsen/logrus"

var log = logrus.WithField("prefix", "beacon")
api/client/beacon/testing/BUILD.bazel (new file, 12 lines)
@@ -0,0 +1,12 @@
load("@prysm//tools/go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = ["mock.go"],
    importpath = "github.com/prysmaticlabs/prysm/v5/api/client/beacon/testing",
    visibility = ["//visibility:public"],
    deps = [
        "//api/client/beacon/iface:go_default_library",
        "@org_uber_go_mock//gomock:go_default_library",
    ],
)
api/client/beacon/testing/mock.go (new file, 53 lines)
@@ -0,0 +1,53 @@
package testing

import (
    "context"
    "reflect"

    "github.com/prysmaticlabs/prysm/v5/api/client/beacon/iface"
    "go.uber.org/mock/gomock"
)

var (
    _ = iface.HealthNode(&MockHealthClient{})
)

// MockHealthClient is a mock of HealthClient interface.
type MockHealthClient struct {
    ctrl     *gomock.Controller
    recorder *MockHealthClientMockRecorder
}

// MockHealthClientMockRecorder is the mock recorder for MockHealthClient.
type MockHealthClientMockRecorder struct {
    mock *MockHealthClient
}

// IsHealthy mocks base method.
func (m *MockHealthClient) IsHealthy(arg0 context.Context) bool {
    m.ctrl.T.Helper()
    ret := m.ctrl.Call(m, "IsHealthy", arg0)
    ret0, ok := ret[0].(bool)
    if !ok {
        return false
    }
    return ret0
}

// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockHealthClient) EXPECT() *MockHealthClientMockRecorder {
    return m.recorder
}

// IsHealthy indicates an expected call of IsHealthy.
func (mr *MockHealthClientMockRecorder) IsHealthy(arg0 any) *gomock.Call {
    mr.mock.ctrl.T.Helper()
    return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsHealthy", reflect.TypeOf((*MockHealthClient)(nil).IsHealthy), arg0)
}

// NewMockHealthClient creates a new mock instance.
func NewMockHealthClient(ctrl *gomock.Controller) *MockHealthClient {
    mock := &MockHealthClient{ctrl: ctrl}
    mock.recorder = &MockHealthClientMockRecorder{mock}
    return mock
}
@@ -8,10 +8,10 @@ go_library(
|
||||
"errors.go",
|
||||
"types.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v4/api/client/builder",
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client/builder",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//beacon-chain/rpc/eth/shared:go_default_library",
|
||||
"//api/server/structs:go_default_library",
|
||||
"//config/fieldparams:go_default_library",
|
||||
"//consensus-types:go_default_library",
|
||||
"//consensus-types/blocks:go_default_library",
|
||||
@@ -20,8 +20,6 @@ go_library(
|
||||
"//encoding/bytesutil:go_default_library",
|
||||
"//math:go_default_library",
|
||||
"//monitoring/tracing:go_default_library",
|
||||
"//network:go_default_library",
|
||||
"//network/authorization:go_default_library",
|
||||
"//proto/engine/v1:go_default_library",
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"//runtime/version:go_default_library",
|
||||
@@ -42,7 +40,7 @@ go_test(
|
||||
data = glob(["testdata/**"]),
|
||||
embed = [":go_default_library"],
|
||||
deps = [
|
||||
"//beacon-chain/rpc/eth/shared:go_default_library",
|
||||
"//api/server/structs:go_default_library",
|
||||
"//config/fieldparams:go_default_library",
|
||||
"//config/params:go_default_library",
|
||||
"//consensus-types/blocks:go_default_library",
|
||||
|
||||
@@ -3,12 +3,11 @@ package builder
|
||||
import (
|
||||
"github.com/pkg/errors"
|
||||
ssz "github.com/prysmaticlabs/fastssz"
|
||||
consensus_types "github.com/prysmaticlabs/prysm/v4/consensus-types"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/blocks"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/interfaces"
|
||||
enginev1 "github.com/prysmaticlabs/prysm/v4/proto/engine/v1"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v4/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v4/runtime/version"
|
||||
consensus_types "github.com/prysmaticlabs/prysm/v5/consensus-types"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/blocks"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/interfaces"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/runtime/version"
|
||||
)
|
||||
|
||||
// SignedBid is an interface describing the method set of a signed builder bid.
|
||||
@@ -22,7 +21,7 @@ type SignedBid interface {
|
||||
// Bid is an interface describing the method set of a builder bid.
|
||||
type Bid interface {
|
||||
Header() (interfaces.ExecutionData, error)
|
||||
BlindedBlobsBundle() (*enginev1.BlindedBlobsBundle, error)
|
||||
BlobKzgCommitments() ([][]byte, error)
|
||||
Value() []byte
|
||||
Pubkey() []byte
|
||||
Version() int
|
||||
@@ -115,9 +114,9 @@ func (b builderBid) Header() (interfaces.ExecutionData, error) {
|
||||
return blocks.WrappedExecutionPayloadHeader(b.p.Header)
|
||||
}
|
||||
|
||||
// BlindedBlobsBundle --
|
||||
func (b builderBid) BlindedBlobsBundle() (*enginev1.BlindedBlobsBundle, error) {
|
||||
return nil, errors.New("blinded blobs bundle not available before Deneb")
|
||||
// BlobKzgCommitments --
|
||||
func (b builderBid) BlobKzgCommitments() ([][]byte, error) {
|
||||
return [][]byte{}, errors.New("blob kzg commitments not available before Deneb")
|
||||
}
|
||||
|
||||
// Version --
|
||||
@@ -166,12 +165,12 @@ func WrappedBuilderBidCapella(p *ethpb.BuilderBidCapella) (Bid, error) {
|
||||
// Header returns the execution data interface.
|
||||
func (b builderBidCapella) Header() (interfaces.ExecutionData, error) {
|
||||
// We have to convert big endian to little endian because the value is coming from the execution layer.
|
||||
return blocks.WrappedExecutionPayloadHeaderCapella(b.p.Header, blocks.PayloadValueToGwei(b.p.Value))
|
||||
return blocks.WrappedExecutionPayloadHeaderCapella(b.p.Header, blocks.PayloadValueToWei(b.p.Value))
|
||||
}
|
||||
|
||||
// BlindedBlobsBundle --
|
||||
func (b builderBidCapella) BlindedBlobsBundle() (*enginev1.BlindedBlobsBundle, error) {
|
||||
return nil, errors.New("blinded blobs bundle not available before Deneb")
|
||||
// BlobKzgCommitments --
|
||||
func (b builderBidCapella) BlobKzgCommitments() ([][]byte, error) {
|
||||
return [][]byte{}, errors.New("blob kzg commitments not available before Deneb")
|
||||
}
|
||||
|
||||
// Version --
|
||||
@@ -250,12 +249,12 @@ func (b builderBidDeneb) HashTreeRootWith(hh *ssz.Hasher) error {
|
||||
// Header --
|
||||
func (b builderBidDeneb) Header() (interfaces.ExecutionData, error) {
|
||||
// We have to convert big endian to little endian because the value is coming from the execution layer.
|
||||
return blocks.WrappedExecutionPayloadHeaderDeneb(b.p.Header, blocks.PayloadValueToGwei(b.p.Value))
|
||||
return blocks.WrappedExecutionPayloadHeaderDeneb(b.p.Header, blocks.PayloadValueToWei(b.p.Value))
|
||||
}
|
||||
|
||||
// BlindedBlobsBundle --
|
||||
func (b builderBidDeneb) BlindedBlobsBundle() (*enginev1.BlindedBlobsBundle, error) {
|
||||
return b.p.BlindedBlobsBundle, nil
|
||||
// BlobKzgCommitments --
|
||||
func (b builderBidDeneb) BlobKzgCommitments() ([][]byte, error) {
|
||||
return b.p.BlobKzgCommitments, nil
|
||||
}
|
||||
|
||||
type signedBuilderBidDeneb struct {
|
||||
|
||||
@@ -6,6 +6,7 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"math/big"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
@@ -13,17 +14,15 @@ import (
|
||||
"text/template"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v4/beacon-chain/rpc/eth/shared"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/blocks"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/interfaces"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v4/encoding/bytesutil"
|
||||
"github.com/prysmaticlabs/prysm/v4/monitoring/tracing"
|
||||
"github.com/prysmaticlabs/prysm/v4/network"
|
||||
"github.com/prysmaticlabs/prysm/v4/network/authorization"
|
||||
v1 "github.com/prysmaticlabs/prysm/v4/proto/engine/v1"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v4/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v4/runtime/version"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/blocks"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/interfaces"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/bytesutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/monitoring/tracing"
|
||||
v1 "github.com/prysmaticlabs/prysm/v5/proto/engine/v1"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/runtime/version"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"go.opencensus.io/trace"
|
||||
)
|
||||
@@ -58,8 +57,8 @@ func (*requestLogger) observe(r *http.Request) (e error) {
|
||||
b := bytes.NewBuffer(nil)
|
||||
if r.Body == nil {
|
||||
log.WithFields(log.Fields{
|
||||
"body-base64": "(nil value)",
|
||||
"url": r.URL.String(),
|
||||
"bodyBase64": "(nil value)",
|
||||
"url": r.URL.String(),
|
||||
}).Info("builder http request")
|
||||
return nil
|
||||
}
|
||||
@@ -75,8 +74,8 @@ func (*requestLogger) observe(r *http.Request) (e error) {
|
||||
}
|
||||
r.Body = io.NopCloser(b)
|
||||
log.WithFields(log.Fields{
|
||||
"body-base64": string(body),
|
||||
"url": r.URL.String(),
|
||||
"bodyBase64": string(body),
|
||||
"url": r.URL.String(),
|
||||
}).Info("builder http request")
|
||||
|
||||
return nil
|
||||
@@ -89,7 +88,7 @@ type BuilderClient interface {
|
||||
NodeURL() string
|
||||
GetHeader(ctx context.Context, slot primitives.Slot, parentHash [32]byte, pubkey [48]byte) (SignedBid, error)
|
||||
RegisterValidator(ctx context.Context, svr []*ethpb.SignedValidatorRegistrationV1) error
|
||||
SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlySignedBeaconBlock, blobs []*ethpb.SignedBlindedBlobSidecar) (interfaces.ExecutionData, *v1.BlobsBundle, error)
|
||||
SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlySignedBeaconBlock) (interfaces.ExecutionData, *v1.BlobsBundle, error)
|
||||
Status(ctx context.Context) error
|
||||
}
|
||||
|
||||
@@ -104,8 +103,7 @@ type Client struct {
|
||||
// `host` is the base host + port used to construct request urls. This value can be
|
||||
// a URL string, or NewClient will assume an http endpoint if just `host:port` is used.
|
||||
func NewClient(host string, opts ...ClientOpt) (*Client, error) {
|
||||
endpoint := covertEndPoint(host)
|
||||
u, err := urlForHost(endpoint.Url)
|
||||
u, err := urlForHost(host)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -121,8 +119,7 @@ func NewClient(host string, opts ...ClientOpt) (*Client, error) {
|
||||
|
||||
func urlForHost(h string) (*url.URL, error) {
|
||||
// try to parse as url (being permissive)
|
||||
u, err := url.Parse(h)
|
||||
if err == nil && u.Host != "" {
|
||||
if u, err := url.Parse(h); err == nil && u.Host != "" {
|
||||
return u, nil
|
||||
}
|
||||
// try to parse as host:port
|
||||
@@ -140,7 +137,7 @@ func (c *Client) NodeURL() string {
|
||||
|
||||
type reqOption func(*http.Request)
|
||||
|
||||
// do is a generic, opinionated request function to reduce boilerplate amongst the methods in this package api/client/builder/types.go.
|
||||
// do is a generic, opinionated request function to reduce boilerplate amongst the methods in this package api/client/builder.
|
||||
func (c *Client) do(ctx context.Context, method string, path string, body io.Reader, opts ...reqOption) (res []byte, err error) {
|
||||
ctx, span := trace.StartSpan(ctx, "builder.client.do")
|
||||
defer func() {
|
||||
@@ -270,13 +267,9 @@ func (c *Client) RegisterValidator(ctx context.Context, svr []*ethpb.SignedValid
|
||||
tracing.AnnotateError(span, err)
|
||||
return err
|
||||
}
|
||||
vs := make([]*shared.SignedValidatorRegistration, len(svr))
|
||||
vs := make([]*structs.SignedValidatorRegistration, len(svr))
|
||||
for i := 0; i < len(svr); i++ {
|
||||
svrJson, err := shared.SignedValidatorRegistrationFromConsensus(svr[i])
|
||||
if err != nil {
|
||||
return errors.Wrap(err, fmt.Sprintf("failed to encode to SignedValidatorRegistration at index %d", i))
|
||||
}
|
||||
vs[i] = svrJson
|
||||
vs[i] = structs.SignedValidatorRegistrationFromConsensus(svr[i])
|
||||
}
|
||||
body, err := json.Marshal(vs)
|
||||
if err != nil {
|
||||
@@ -291,7 +284,7 @@ func (c *Client) RegisterValidator(ctx context.Context, svr []*ethpb.SignedValid
|
||||
|
||||
// SubmitBlindedBlock calls the builder API endpoint that binds the validator to the builder and submits the block.
|
||||
// The response is the full execution payload used to create the blinded block.
|
||||
func (c *Client) SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlySignedBeaconBlock, blobs []*ethpb.SignedBlindedBlobSidecar) (interfaces.ExecutionData, *v1.BlobsBundle, error) {
|
||||
func (c *Client) SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlySignedBeaconBlock) (interfaces.ExecutionData, *v1.BlobsBundle, error) {
|
||||
if !sb.IsBlinded() {
|
||||
return nil, nil, errNotBlinded
|
||||
}
|
||||
@@ -301,7 +294,7 @@ func (c *Client) SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlyS
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "could not get protobuf block")
|
||||
}
|
||||
b, err := shared.SignedBlindedBeaconBlockBellatrixFromConsensus(ðpb.SignedBlindedBeaconBlockBellatrix{Block: psb.Block, Signature: bytesutil.SafeCopyBytes(psb.Signature)})
|
||||
b, err := structs.SignedBlindedBeaconBlockBellatrixFromConsensus(ðpb.SignedBlindedBeaconBlockBellatrix{Block: psb.Block, Signature: bytesutil.SafeCopyBytes(psb.Signature)})
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "could not convert SignedBlindedBeaconBlockBellatrix to json marshalable type")
|
||||
}
|
||||
@@ -311,6 +304,8 @@ func (c *Client) SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlyS
|
||||
}
|
||||
versionOpt := func(r *http.Request) {
|
||||
r.Header.Add("Eth-Consensus-Version", version.String(version.Bellatrix))
|
||||
r.Header.Set("Content-Type", "application/json")
|
||||
r.Header.Set("Accept", "application/json")
|
||||
}
|
||||
rb, err := c.do(ctx, http.MethodPost, postBlindedBeaconBlockPath, bytes.NewBuffer(body), versionOpt)
|
||||
|
||||
@@ -338,7 +333,7 @@ func (c *Client) SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlyS
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "could not get protobuf block")
|
||||
}
|
||||
b, err := shared.SignedBlindedBeaconBlockCapellaFromConsensus(ðpb.SignedBlindedBeaconBlockCapella{Block: psb.Block, Signature: bytesutil.SafeCopyBytes(psb.Signature)})
|
||||
b, err := structs.SignedBlindedBeaconBlockCapellaFromConsensus(ðpb.SignedBlindedBeaconBlockCapella{Block: psb.Block, Signature: bytesutil.SafeCopyBytes(psb.Signature)})
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "could not convert SignedBlindedBeaconBlockCapella to json marshalable type")
|
||||
}
|
||||
@@ -348,6 +343,8 @@ func (c *Client) SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlyS
|
||||
}
|
||||
versionOpt := func(r *http.Request) {
|
||||
r.Header.Add("Eth-Consensus-Version", version.String(version.Capella))
|
||||
r.Header.Set("Content-Type", "application/json")
|
||||
r.Header.Set("Accept", "application/json")
|
||||
}
|
||||
rb, err := c.do(ctx, http.MethodPost, postBlindedBeaconBlockPath, bytes.NewBuffer(body), versionOpt)
|
||||
|
||||
@@ -365,7 +362,7 @@ func (c *Client) SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlyS
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "could not extract proto message from payload")
|
||||
}
|
||||
payload, err := blocks.WrappedExecutionPayloadCapella(p, 0)
|
||||
payload, err := blocks.WrappedExecutionPayloadCapella(p, big.NewInt(0))
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "could not wrap execution payload in interface")
|
||||
}
|
||||
@@ -375,9 +372,9 @@ func (c *Client) SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlyS
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "could not get protobuf block")
|
||||
}
|
||||
b, err := shared.SignedBlindedBeaconBlockContentsDenebFromConsensus(ðpb.SignedBlindedBeaconBlockAndBlobsDeneb{SignedBlindedBlock: psb, SignedBlindedBlobSidecars: blobs})
|
||||
b, err := structs.SignedBlindedBeaconBlockDenebFromConsensus(ðpb.SignedBlindedBeaconBlockDeneb{Message: psb.Message, Signature: bytesutil.SafeCopyBytes(psb.Signature)})
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "could not convert SignedBlindedBeaconBlockContentsDeneb to json marshalable type")
|
||||
return nil, nil, errors.Wrapf(err, "could not convert SignedBlindedBeaconBlockDeneb to json marshalable type")
|
||||
}
|
||||
body, err := json.Marshal(b)
|
||||
if err != nil {
|
||||
@@ -386,6 +383,8 @@ func (c *Client) SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlyS
|
||||
|
||||
versionOpt := func(r *http.Request) {
|
||||
r.Header.Add("Eth-Consensus-Version", version.String(version.Deneb))
|
||||
r.Header.Set("Content-Type", "application/json")
|
||||
r.Header.Set("Accept", "application/json")
|
||||
}
|
||||
rb, err := c.do(ctx, http.MethodPost, postBlindedBeaconBlockPath, bytes.NewBuffer(body), versionOpt)
|
||||
if err != nil {
|
||||
@@ -402,7 +401,7 @@ func (c *Client) SubmitBlindedBlock(ctx context.Context, sb interfaces.ReadOnlyS
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "could not extract proto message from payload")
|
||||
}
|
||||
payload, err := blocks.WrappedExecutionPayloadDeneb(p, 0)
|
||||
payload, err := blocks.WrappedExecutionPayloadDeneb(p, big.NewInt(0))
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "could not wrap execution payload in interface")
|
||||
}
|
||||
@@ -431,38 +430,29 @@ func non200Err(response *http.Response) error {
|
||||
}
|
||||
msg := fmt.Sprintf("code=%d, url=%s, body=%s", response.StatusCode, response.Request.URL, body)
|
||||
switch response.StatusCode {
|
||||
case 204:
|
||||
case http.StatusNoContent:
|
||||
log.WithError(ErrNoContent).Debug(msg)
|
||||
return ErrNoContent
|
||||
case 400:
|
||||
if jsonErr := json.Unmarshal(bodyBytes, &errMessage); jsonErr != nil {
|
||||
return errors.Wrap(jsonErr, "unable to read response body")
|
||||
}
|
||||
case http.StatusBadRequest:
|
||||
log.WithError(ErrBadRequest).Debug(msg)
|
||||
if jsonErr := json.Unmarshal(bodyBytes, &errMessage); jsonErr != nil {
|
||||
return errors.Wrap(jsonErr, "unable to read response body")
|
||||
}
|
||||
return errors.Wrap(ErrBadRequest, errMessage.Message)
|
||||
case 404:
|
||||
if jsonErr := json.Unmarshal(bodyBytes, &errMessage); jsonErr != nil {
|
||||
return errors.Wrap(jsonErr, "unable to read response body")
|
||||
}
|
||||
case http.StatusNotFound:
|
||||
log.WithError(ErrNotFound).Debug(msg)
|
||||
return errors.Wrap(ErrNotFound, errMessage.Message)
|
||||
case 500:
|
||||
if jsonErr := json.Unmarshal(bodyBytes, &errMessage); jsonErr != nil {
|
||||
return errors.Wrap(jsonErr, "unable to read response body")
|
||||
}
|
||||
return errors.Wrap(ErrNotFound, errMessage.Message)
|
||||
case http.StatusInternalServerError:
|
||||
log.WithError(ErrNotOK).Debug(msg)
|
||||
if jsonErr := json.Unmarshal(bodyBytes, &errMessage); jsonErr != nil {
|
||||
return errors.Wrap(jsonErr, "unable to read response body")
|
||||
}
|
||||
return errors.Wrap(ErrNotOK, errMessage.Message)
|
||||
default:
|
||||
log.WithError(ErrNotOK).Debug(msg)
|
||||
return errors.Wrap(ErrNotOK, fmt.Sprintf("unsupported error code: %d", response.StatusCode))
|
||||
}
|
||||
}
|
||||
|
||||
func covertEndPoint(ep string) network.Endpoint {
|
||||
return network.Endpoint{
|
||||
Url: ep,
|
||||
Auth: network.AuthorizationData{ // Auth is not used for builder.
|
||||
Method: authorization.None,
|
||||
Value: "",
|
||||
}}
|
||||
}
|
||||
|
||||
@@ -12,18 +12,16 @@ import (
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/prysmaticlabs/go-bitfield"
|
||||
"github.com/prysmaticlabs/prysm/v4/beacon-chain/rpc/eth/shared"
|
||||
fieldparams "github.com/prysmaticlabs/prysm/v4/config/fieldparams"
|
||||
"github.com/prysmaticlabs/prysm/v4/config/params"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/blocks"
|
||||
types "github.com/prysmaticlabs/prysm/v4/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v4/encoding/bytesutil"
|
||||
v1 "github.com/prysmaticlabs/prysm/v4/proto/engine/v1"
|
||||
eth "github.com/prysmaticlabs/prysm/v4/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
"github.com/prysmaticlabs/prysm/v5/config/params"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/blocks"
|
||||
types "github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/bytesutil"
|
||||
v1 "github.com/prysmaticlabs/prysm/v5/proto/engine/v1"
|
||||
eth "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
@@ -270,28 +268,20 @@ func TestClient_GetHeader(t *testing.T) {
|
||||
bidValue := bytesutil.ReverseByteOrder(bid.Value())
|
||||
require.DeepEqual(t, bidValue, value.Bytes())
|
||||
require.DeepEqual(t, big.NewInt(0).SetBytes(bidValue), value.Int)
|
||||
bundle, err := bid.BlindedBlobsBundle()
|
||||
kcgCommitments, err := bid.BlobKzgCommitments()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, len(bundle.BlobRoots) <= fieldparams.MaxBlobsPerBlock && len(bundle.BlobRoots) > 0, true)
|
||||
for i := range bundle.BlobRoots {
|
||||
require.Equal(t, len(bundle.BlobRoots[i]) == fieldparams.RootLength, true)
|
||||
}
|
||||
require.Equal(t, len(bundle.KzgCommitments) > 0, true)
|
||||
for i := range bundle.KzgCommitments {
|
||||
require.Equal(t, len(bundle.KzgCommitments[i]) == 48, true)
|
||||
}
|
||||
require.Equal(t, len(bundle.Proofs) > 0, true)
|
||||
for i := range bundle.Proofs {
|
||||
require.Equal(t, len(bundle.Proofs[i]) == 48, true)
|
||||
require.Equal(t, len(kcgCommitments) > 0, true)
|
||||
for i := range kcgCommitments {
|
||||
require.Equal(t, len(kcgCommitments[i]) == 48, true)
|
||||
}
|
||||
})
|
||||
t.Run("deneb, no bundle", func(t *testing.T) {
|
||||
t.Run("deneb, too many kzg commitments", func(t *testing.T) {
|
||||
hc := &http.Client{
|
||||
Transport: roundtrip(func(r *http.Request) (*http.Response, error) {
|
||||
require.Equal(t, expectedPath, r.URL.Path)
|
||||
return &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(testExampleHeaderResponseDenebNoBundle)),
|
||||
Body: io.NopCloser(bytes.NewBufferString(testExampleHeaderResponseDenebTooManyBlobs)),
|
||||
Request: r.Clone(ctx),
|
||||
}, nil
|
||||
}),
|
||||
@@ -300,27 +290,9 @@ func TestClient_GetHeader(t *testing.T) {
|
||||
hc: hc,
|
||||
baseURL: &url.URL{Host: "localhost:3500", Scheme: "http"},
|
||||
}
|
||||
h, err := c.GetHeader(ctx, slot, bytesutil.ToBytes32(parentHash), bytesutil.ToBytes48(pubkey))
|
||||
require.NoError(t, err)
|
||||
expectedWithdrawalsRoot := ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2")
|
||||
bid, err := h.Message()
|
||||
require.NoError(t, err)
|
||||
bidHeader, err := bid.Header()
|
||||
require.NoError(t, err)
|
||||
withdrawalsRoot, err := bidHeader.WithdrawalsRoot()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, true, bytes.Equal(expectedWithdrawalsRoot, withdrawalsRoot))
|
||||
value, err := stringToUint256("652312848583266388373324160190187140051835877600158453279131187530910662656")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, fmt.Sprintf("%#x", value.SSZBytes()), fmt.Sprintf("%#x", bid.Value()))
|
||||
bidValue := bytesutil.ReverseByteOrder(bid.Value())
|
||||
require.DeepEqual(t, bidValue, value.Bytes())
|
||||
require.DeepEqual(t, big.NewInt(0).SetBytes(bidValue), value.Int)
|
||||
bundle, err := bid.BlindedBlobsBundle()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, (*v1.BlindedBlobsBundle)(nil), bundle)
|
||||
_, err := c.GetHeader(ctx, slot, bytesutil.ToBytes32(parentHash), bytesutil.ToBytes48(pubkey))
|
||||
require.ErrorContains(t, "could not extract proto message from header: too many blob commitments: 7", err)
|
||||
})
|
||||
|
||||
t.Run("unsupported version", func(t *testing.T) {
|
||||
hc := &http.Client{
|
||||
Transport: roundtrip(func(r *http.Request) (*http.Response, error) {
|
||||
@@ -349,6 +321,8 @@ func TestSubmitBlindedBlock(t *testing.T) {
|
||||
Transport: roundtrip(func(r *http.Request) (*http.Response, error) {
|
||||
require.Equal(t, postBlindedBeaconBlockPath, r.URL.Path)
|
||||
require.Equal(t, "bellatrix", r.Header.Get("Eth-Consensus-Version"))
|
||||
require.Equal(t, "application/json", r.Header.Get("Content-Type"))
|
||||
require.Equal(t, "application/json", r.Header.Get("Accept"))
|
||||
return &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(testExampleExecutionPayload)),
|
||||
@@ -362,7 +336,7 @@ func TestSubmitBlindedBlock(t *testing.T) {
|
||||
}
|
||||
sbbb, err := blocks.NewSignedBeaconBlock(testSignedBlindedBeaconBlockBellatrix(t))
|
||||
require.NoError(t, err)
|
||||
ep, _, err := c.SubmitBlindedBlock(ctx, sbbb, nil)
|
||||
ep, _, err := c.SubmitBlindedBlock(ctx, sbbb)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, true, bytes.Equal(ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"), ep.ParentHash()))
|
||||
bfpg, err := stringToUint256("452312848583266388373324160190187140051835877600158453279131187530910662656")
|
||||
@@ -375,6 +349,8 @@ func TestSubmitBlindedBlock(t *testing.T) {
|
||||
Transport: roundtrip(func(r *http.Request) (*http.Response, error) {
|
||||
require.Equal(t, postBlindedBeaconBlockPath, r.URL.Path)
|
||||
require.Equal(t, "capella", r.Header.Get("Eth-Consensus-Version"))
|
||||
require.Equal(t, "application/json", r.Header.Get("Content-Type"))
|
||||
require.Equal(t, "application/json", r.Header.Get("Accept"))
|
||||
return &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(testExampleExecutionPayloadCapella)),
|
||||
@@ -388,7 +364,7 @@ func TestSubmitBlindedBlock(t *testing.T) {
|
||||
}
|
||||
sbb, err := blocks.NewSignedBeaconBlock(testSignedBlindedBeaconBlockCapella(t))
|
||||
require.NoError(t, err)
|
||||
ep, _, err := c.SubmitBlindedBlock(ctx, sbb, nil)
|
||||
ep, _, err := c.SubmitBlindedBlock(ctx, sbb)
|
||||
require.NoError(t, err)
|
||||
withdrawals, err := ep.Withdrawals()
|
||||
require.NoError(t, err)
|
||||
@@ -399,18 +375,19 @@ func TestSubmitBlindedBlock(t *testing.T) {
|
||||
assert.Equal(t, uint64(1), withdrawals[0].Amount)
|
||||
})
|
||||
t.Run("deneb", func(t *testing.T) {
|
||||
|
||||
test := testSignedBlindedBeaconBlockAndBlobsDeneb(t)
|
||||
test := testSignedBlindedBeaconBlockDeneb(t)
|
||||
hc := &http.Client{
|
||||
Transport: roundtrip(func(r *http.Request) (*http.Response, error) {
|
||||
require.Equal(t, postBlindedBeaconBlockPath, r.URL.Path)
|
||||
require.Equal(t, "deneb", r.Header.Get("Eth-Consensus-Version"))
|
||||
var req shared.SignedBlindedBeaconBlockContentsDeneb
|
||||
require.Equal(t, "application/json", r.Header.Get("Content-Type"))
|
||||
require.Equal(t, "application/json", r.Header.Get("Accept"))
|
||||
var req structs.SignedBlindedBeaconBlockDeneb
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
require.NoError(t, err)
|
||||
block, err := req.SignedBlindedBlock.ToConsensus()
|
||||
block, err := req.ToConsensus()
|
||||
require.NoError(t, err)
|
||||
require.DeepEqual(t, block, test.SignedBlindedBlock)
|
||||
require.DeepEqual(t, block, test)
|
||||
return &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(testExampleExecutionPayloadDeneb)),
|
||||
@@ -423,10 +400,10 @@ func TestSubmitBlindedBlock(t *testing.T) {
|
||||
baseURL: &url.URL{Host: "localhost:3500", Scheme: "http"},
|
||||
}
|
||||
|
||||
sbb, err := blocks.NewSignedBeaconBlock(test.SignedBlindedBlock)
|
||||
sbb, err := blocks.NewSignedBeaconBlock(test)
|
||||
require.NoError(t, err)
|
||||
|
||||
ep, blobBundle, err := c.SubmitBlindedBlock(ctx, sbb, test.SignedBlindedBlobSidecars)
|
||||
ep, blobBundle, err := c.SubmitBlindedBlock(ctx, sbb)
|
||||
require.NoError(t, err)
|
||||
withdrawals, err := ep.Withdrawals()
|
||||
require.NoError(t, err)
|
||||
@@ -436,9 +413,6 @@ func TestSubmitBlindedBlock(t *testing.T) {
|
||||
assert.DeepEqual(t, ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943"), withdrawals[0].Address)
|
||||
assert.Equal(t, uint64(1), withdrawals[0].Amount)
|
||||
require.NotNil(t, blobBundle)
|
||||
require.Equal(t, hexutil.Encode(blobBundle.Blobs[0]), hexutil.Encode(make([]byte, fieldparams.BlobLength)))
|
||||
require.Equal(t, hexutil.Encode(blobBundle.KzgCommitments[0]), "0x8dab030c51e16e84be9caab84ee3d0b8bbec1db4a0e4de76439da8424d9b957370a10a78851f97e4b54d2ce1ab0d686f")
|
||||
require.Equal(t, hexutil.Encode(blobBundle.Proofs[0]), "0xb4021b0de10f743893d4f71e1bf830c019e832958efd6795baf2f83b8699a9eccc5dc99015d8d4d8ec370d0cc333c06a")
|
||||
})
|
||||
t.Run("mismatched versions, expected bellatrix got capella", func(t *testing.T) {
|
||||
hc := &http.Client{
|
||||
@@ -457,13 +431,13 @@ func TestSubmitBlindedBlock(t *testing.T) {
|
||||
}
|
||||
sbbb, err := blocks.NewSignedBeaconBlock(testSignedBlindedBeaconBlockBellatrix(t))
|
||||
require.NoError(t, err)
|
||||
_, _, err = c.SubmitBlindedBlock(ctx, sbbb, nil)
|
||||
_, _, err = c.SubmitBlindedBlock(ctx, sbbb)
|
||||
require.ErrorContains(t, "not a bellatrix payload", err)
|
||||
})
|
||||
t.Run("not blinded", func(t *testing.T) {
|
||||
sbb, err := blocks.NewSignedBeaconBlock(ð.SignedBeaconBlockBellatrix{Block: ð.BeaconBlockBellatrix{Body: ð.BeaconBlockBodyBellatrix{}}})
|
||||
sbb, err := blocks.NewSignedBeaconBlock(ð.SignedBeaconBlockBellatrix{Block: ð.BeaconBlockBellatrix{Body: ð.BeaconBlockBodyBellatrix{ExecutionPayload: &v1.ExecutionPayload{}}}})
|
||||
require.NoError(t, err)
|
||||
_, _, err = (&Client{}).SubmitBlindedBlock(ctx, sbb, nil)
|
||||
_, _, err = (&Client{}).SubmitBlindedBlock(ctx, sbb)
|
||||
require.ErrorIs(t, err, errNotBlinded)
|
||||
})
|
||||
}
|
||||
@@ -753,91 +727,70 @@ func testSignedBlindedBeaconBlockCapella(t *testing.T) *eth.SignedBlindedBeaconB
|
||||
}
|
||||
}
|
||||
|
||||
func testSignedBlindedBeaconBlockAndBlobsDeneb(t *testing.T) *eth.SignedBlindedBeaconBlockAndBlobsDeneb {
|
||||
basebytes, err := shared.Uint256ToSSZBytes("14074904626401341155369551180448584754667373453244490859944217516317499064576")
|
||||
func testSignedBlindedBeaconBlockDeneb(t *testing.T) *eth.SignedBlindedBeaconBlockDeneb {
|
||||
basebytes, err := bytesutil.Uint256ToSSZBytes("14074904626401341155369551180448584754667373453244490859944217516317499064576")
|
||||
if err != nil {
|
||||
log.Error(err)
|
||||
}
|
||||
return ð.SignedBlindedBeaconBlockAndBlobsDeneb{
|
||||
SignedBlindedBlock: ð.SignedBlindedBeaconBlockDeneb{
|
||||
Message: ð.BlindedBeaconBlockDeneb{
|
||||
Slot: 1,
|
||||
ProposerIndex: 1,
|
||||
ParentRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
StateRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
Body: ð.BlindedBeaconBlockBodyDeneb{
|
||||
RandaoReveal: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
Eth1Data: ð.Eth1Data{
|
||||
DepositRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
DepositCount: 1,
|
||||
BlockHash: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Graffiti: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
ProposerSlashings: []*eth.ProposerSlashing{
|
||||
{
|
||||
Header_1: ð.SignedBeaconBlockHeader{
|
||||
Header: ð.BeaconBlockHeader{
|
||||
Slot: 1,
|
||||
ProposerIndex: 1,
|
||||
ParentRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
StateRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
BodyRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
return ð.SignedBlindedBeaconBlockDeneb{
|
||||
Message: ð.BlindedBeaconBlockDeneb{
|
||||
Slot: 1,
|
||||
ProposerIndex: 1,
|
||||
ParentRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
StateRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
Body: ð.BlindedBeaconBlockBodyDeneb{
|
||||
RandaoReveal: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
Eth1Data: ð.Eth1Data{
|
||||
DepositRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
DepositCount: 1,
|
||||
BlockHash: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Graffiti: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
ProposerSlashings: []*eth.ProposerSlashing{
|
||||
{
|
||||
Header_1: ð.SignedBeaconBlockHeader{
|
||||
Header: ð.BeaconBlockHeader{
|
||||
Slot: 1,
|
||||
ProposerIndex: 1,
|
||||
ParentRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
StateRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
BodyRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Header_2: ð.SignedBeaconBlockHeader{
|
||||
Header: ð.BeaconBlockHeader{
|
||||
Slot: 1,
|
||||
ProposerIndex: 1,
|
||||
ParentRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
StateRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
BodyRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
Header_2: ð.SignedBeaconBlockHeader{
|
||||
Header: ð.BeaconBlockHeader{
|
||||
Slot: 1,
|
||||
ProposerIndex: 1,
|
||||
ParentRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
StateRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
BodyRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
},
|
||||
AttesterSlashings: []*eth.AttesterSlashing{
|
||||
{
|
||||
Attestation_1: ð.IndexedAttestation{
|
||||
AttestingIndices: []uint64{1},
|
||||
Data: ð.AttestationData{
|
||||
Slot: 1,
|
||||
CommitteeIndex: 1,
|
||||
BeaconBlockRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
Source: ð.Checkpoint{
|
||||
Epoch: 1,
|
||||
Root: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Target: ð.Checkpoint{
|
||||
Epoch: 1,
|
||||
Root: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
},
|
||||
AttesterSlashings: []*eth.AttesterSlashing{
|
||||
{
|
||||
Attestation_1: ð.IndexedAttestation{
|
||||
AttestingIndices: []uint64{1},
|
||||
Data: ð.AttestationData{
|
||||
Slot: 1,
|
||||
CommitteeIndex: 1,
|
||||
BeaconBlockRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
Source: ð.Checkpoint{
|
||||
Epoch: 1,
|
||||
Root: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
Attestation_2: ð.IndexedAttestation{
|
||||
AttestingIndices: []uint64{1},
|
||||
Data: ð.AttestationData{
|
||||
Slot: 1,
|
||||
CommitteeIndex: 1,
|
||||
BeaconBlockRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
Source: ð.Checkpoint{
|
||||
Epoch: 1,
|
||||
Root: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Target: ð.Checkpoint{
|
||||
Epoch: 1,
|
||||
Root: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Target: ð.Checkpoint{
|
||||
Epoch: 1,
|
||||
Root: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
},
|
||||
Attestations: []*eth.Attestation{
|
||||
{
|
||||
AggregationBits: bitfield.Bitlist{0x01},
|
||||
Attestation_2: ð.IndexedAttestation{
|
||||
AttestingIndices: []uint64{1},
|
||||
Data: ð.AttestationData{
|
||||
Slot: 1,
|
||||
CommitteeIndex: 1,
|
||||
@@ -854,68 +807,72 @@ func testSignedBlindedBeaconBlockAndBlobsDeneb(t *testing.T) *eth.SignedBlindedB
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
},
|
||||
Deposits: []*eth.Deposit{
|
||||
{
|
||||
Proof: [][]byte{ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2")},
|
||||
Data: ð.Deposit_Data{
|
||||
PublicKey: ezDecode(t, "0x93247f2209abcacf57b75a51dafae777f9dd38bc7053d1af526f220a7489a6d3a2753e5f3e8b1cfe39b56f43611df74a"),
|
||||
WithdrawalCredentials: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
Amount: 1,
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
Attestations: []*eth.Attestation{
|
||||
{
|
||||
AggregationBits: bitfield.Bitlist{0x01},
|
||||
Data: ð.AttestationData{
|
||||
Slot: 1,
|
||||
CommitteeIndex: 1,
|
||||
BeaconBlockRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
Source: ð.Checkpoint{
|
||||
Epoch: 1,
|
||||
Root: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
Target: ð.Checkpoint{
|
||||
Epoch: 1,
|
||||
Root: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
},
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
VoluntaryExits: []*eth.SignedVoluntaryExit{
|
||||
{
|
||||
Exit: ð.VoluntaryExit{
|
||||
Epoch: 1,
|
||||
ValidatorIndex: 1,
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
Deposits: []*eth.Deposit{
|
||||
{
|
||||
Proof: [][]byte{ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2")},
|
||||
Data: ð.Deposit_Data{
|
||||
PublicKey: ezDecode(t, "0x93247f2209abcacf57b75a51dafae777f9dd38bc7053d1af526f220a7489a6d3a2753e5f3e8b1cfe39b56f43611df74a"),
|
||||
WithdrawalCredentials: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
Amount: 1,
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
},
|
||||
SyncAggregate: ð.SyncAggregate{
|
||||
SyncCommitteeSignature: make([]byte, 96),
|
||||
SyncCommitteeBits: ezDecode(t, "0x6451e9f951ebf05edc01de67e593484b672877054f055903ff0df1a1a945cf30ca26bb4d4b154f94a1bc776bcf5d0efb3603e1f9b8ee2499ccdcfe2a18cef458"),
|
||||
},
|
||||
ExecutionPayloadHeader: &v1.ExecutionPayloadHeaderDeneb{
|
||||
ParentHash: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
FeeRecipient: ezDecode(t, "0xabcf8e0d4e9587369b2301d0790347320302cc09"),
|
||||
StateRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
ReceiptsRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
LogsBloom: ezDecode(t, "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
|
||||
PrevRandao: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
BlockNumber: 1,
|
||||
GasLimit: 1,
|
||||
GasUsed: 1,
|
||||
Timestamp: 1,
|
||||
ExtraData: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
BaseFeePerGas: basebytes,
|
||||
BlockHash: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
TransactionsRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
WithdrawalsRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
BlobGasUsed: 1,
|
||||
ExcessBlobGas: 2,
|
||||
},
|
||||
VoluntaryExits: []*eth.SignedVoluntaryExit{
|
||||
{
|
||||
Exit: ð.VoluntaryExit{
|
||||
Epoch: 1,
|
||||
ValidatorIndex: 1,
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
},
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
SignedBlindedBlobSidecars: []*eth.SignedBlindedBlobSidecar{
|
||||
{
|
||||
Message: ð.BlindedBlobSidecar{
|
||||
BlockRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
Index: 0,
|
||||
Slot: 1,
|
||||
BlockParentRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
ProposerIndex: 1,
|
||||
BlobRoot: ezDecode(t, "0x24564723180fcb3d994104538d351c8dcbde12d541676bb736cf678018ca4739"),
|
||||
KzgCommitment: ezDecode(t, "0x8dab030c51e16e84be9caab84ee3d0b8bbec1db4a0e4de76439da8424d9b957370a10a78851f97e4b54d2ce1ab0d686f"),
|
||||
KzgProof: ezDecode(t, "0xb4021b0de10f743893d4f71e1bf830c019e832958efd6795baf2f83b8699a9eccc5dc99015d8d4d8ec370d0cc333c06a"),
|
||||
SyncAggregate: ð.SyncAggregate{
|
||||
SyncCommitteeSignature: make([]byte, 96),
|
||||
SyncCommitteeBits: ezDecode(t, "0x6451e9f951ebf05edc01de67e593484b672877054f055903ff0df1a1a945cf30ca26bb4d4b154f94a1bc776bcf5d0efb3603e1f9b8ee2499ccdcfe2a18cef458"),
|
||||
},
|
||||
ExecutionPayloadHeader: &v1.ExecutionPayloadHeaderDeneb{
|
||||
ParentHash: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
FeeRecipient: ezDecode(t, "0xabcf8e0d4e9587369b2301d0790347320302cc09"),
|
||||
StateRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
ReceiptsRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
LogsBloom: ezDecode(t, "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
|
||||
PrevRandao: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
BlockNumber: 1,
|
||||
GasLimit: 1,
|
||||
GasUsed: 1,
|
||||
Timestamp: 1,
|
||||
ExtraData: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
BaseFeePerGas: basebytes,
|
||||
BlockHash: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
TransactionsRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
WithdrawalsRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
BlobGasUsed: 1,
|
||||
ExcessBlobGas: 2,
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
},
|
||||
},
|
||||
Signature: ezDecode(t, "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library")
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = ["mock.go"],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v4/api/client/builder/testing",
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client/builder/testing",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//api/client/builder:go_default_library",
|
||||
|
||||
@@ -3,12 +3,12 @@ package testing
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v4/api/client/builder"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/interfaces"
|
||||
"github.com/prysmaticlabs/prysm/v4/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v4/encoding/bytesutil"
|
||||
v1 "github.com/prysmaticlabs/prysm/v4/proto/engine/v1"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v4/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client/builder"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/interfaces"
|
||||
"github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/bytesutil"
|
||||
v1 "github.com/prysmaticlabs/prysm/v5/proto/engine/v1"
|
||||
ethpb "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
)
|
||||
|
||||
// MockClient is a mock implementation of BuilderClient.
|
||||
@@ -41,7 +41,7 @@ func (m MockClient) RegisterValidator(_ context.Context, svr []*ethpb.SignedVali
|
||||
}
|
||||
|
||||
// SubmitBlindedBlock --
|
||||
func (MockClient) SubmitBlindedBlock(_ context.Context, _ interfaces.ReadOnlySignedBeaconBlock, _ []*ethpb.SignedBlindedBlobSidecar) (interfaces.ExecutionData, *v1.BlobsBundle, error) {
|
||||
func (MockClient) SubmitBlindedBlock(_ context.Context, _ interfaces.ReadOnlySignedBeaconBlock) (interfaces.ExecutionData, *v1.BlobsBundle, error) {
|
||||
return nil, nil, nil
|
||||
}
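A hedged usage sketch (not part of the diff) of the new two-argument SubmitBlindedBlock signature shown above; ctx, builderClient, and signedBlindedBlock are assumed to exist in the caller:

payload, blobsBundle, err := builderClient.SubmitBlindedBlock(ctx, signedBlindedBlock)
if err != nil {
	// handle submission error
}
_ = payload     // interfaces.ExecutionData for the unblinded execution payload
_ = blobsBundle // *v1.BlobsBundle returned for Deneb blocks; expected to be nil for earlier forks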
|
||||
|
||||
|
||||
@@ -8,12 +8,12 @@ import (
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/pkg/errors"
|
||||
fieldparams "github.com/prysmaticlabs/prysm/v4/config/fieldparams"
|
||||
types "github.com/prysmaticlabs/prysm/v4/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v4/encoding/bytesutil"
|
||||
"github.com/prysmaticlabs/prysm/v4/math"
|
||||
v1 "github.com/prysmaticlabs/prysm/v4/proto/engine/v1"
|
||||
eth "github.com/prysmaticlabs/prysm/v4/proto/prysm/v1alpha1"
|
||||
fieldparams "github.com/prysmaticlabs/prysm/v5/config/fieldparams"
|
||||
types "github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
|
||||
"github.com/prysmaticlabs/prysm/v5/encoding/bytesutil"
|
||||
"github.com/prysmaticlabs/prysm/v5/math"
|
||||
v1 "github.com/prysmaticlabs/prysm/v5/proto/engine/v1"
|
||||
eth "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
)
|
||||
|
||||
var errInvalidUint256 = errors.New("invalid Uint256")
|
||||
@@ -117,7 +117,7 @@ type VersionResponse struct {
|
||||
Version string `json:"version"`
|
||||
}
|
||||
|
||||
// ExecHeaderResponse is a JSON representation of the builder API header response for Bellatrix.
type ExecHeaderResponse struct {
|
||||
Version string `json:"version"`
|
||||
Data struct {
|
||||
@@ -357,6 +357,45 @@ func FromProtoCapella(payload *v1.ExecutionPayloadCapella) (ExecutionPayloadCape
|
||||
}, nil
|
||||
}
|
||||
|
||||
func FromProtoDeneb(payload *v1.ExecutionPayloadDeneb) (ExecutionPayloadDeneb, error) {
|
||||
bFee, err := sszBytesToUint256(payload.BaseFeePerGas)
|
||||
if err != nil {
|
||||
return ExecutionPayloadDeneb{}, err
|
||||
}
|
||||
txs := make([]hexutil.Bytes, len(payload.Transactions))
|
||||
for i := range payload.Transactions {
|
||||
txs[i] = bytesutil.SafeCopyBytes(payload.Transactions[i])
|
||||
}
|
||||
withdrawals := make([]Withdrawal, len(payload.Withdrawals))
|
||||
for i, w := range payload.Withdrawals {
|
||||
withdrawals[i] = Withdrawal{
|
||||
Index: Uint256{Int: big.NewInt(0).SetUint64(w.Index)},
|
||||
ValidatorIndex: Uint256{Int: big.NewInt(0).SetUint64(uint64(w.ValidatorIndex))},
|
||||
Address: bytesutil.SafeCopyBytes(w.Address),
|
||||
Amount: Uint256{Int: big.NewInt(0).SetUint64(w.Amount)},
|
||||
}
|
||||
}
|
||||
return ExecutionPayloadDeneb{
|
||||
ParentHash: bytesutil.SafeCopyBytes(payload.ParentHash),
|
||||
FeeRecipient: bytesutil.SafeCopyBytes(payload.FeeRecipient),
|
||||
StateRoot: bytesutil.SafeCopyBytes(payload.StateRoot),
|
||||
ReceiptsRoot: bytesutil.SafeCopyBytes(payload.ReceiptsRoot),
|
||||
LogsBloom: bytesutil.SafeCopyBytes(payload.LogsBloom),
|
||||
PrevRandao: bytesutil.SafeCopyBytes(payload.PrevRandao),
|
||||
BlockNumber: Uint64String(payload.BlockNumber),
|
||||
GasLimit: Uint64String(payload.GasLimit),
|
||||
GasUsed: Uint64String(payload.GasUsed),
|
||||
Timestamp: Uint64String(payload.Timestamp),
|
||||
ExtraData: bytesutil.SafeCopyBytes(payload.ExtraData),
|
||||
BaseFeePerGas: bFee,
|
||||
BlockHash: bytesutil.SafeCopyBytes(payload.BlockHash),
|
||||
Transactions: txs,
|
||||
Withdrawals: withdrawals,
|
||||
BlobGasUsed: Uint64String(payload.BlobGasUsed),
|
||||
ExcessBlobGas: Uint64String(payload.ExcessBlobGas),
|
||||
}, nil
|
||||
}
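A hedged usage sketch (not part of the diff) for the FromProtoDeneb helper added above; protoPayload is assumed to be a populated *v1.ExecutionPayloadDeneb and the package imported as builder:

jsonPayload, err := builder.FromProtoDeneb(protoPayload)
if err != nil {
	// handle conversion error
}
// The resulting struct marshals with hex-encoded byte fields and decimal string integers,
// matching the JSON fixtures used in the tests.
encoded, err := json.Marshal(jsonPayload)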
|
||||
|
||||
// ExecHeaderResponseCapella is the response of builder API /eth/v1/builder/header/{slot}/{parent_hash}/{pubkey} for Capella.
|
||||
type ExecHeaderResponseCapella struct {
|
||||
Data struct {
|
||||
@@ -869,16 +908,19 @@ func (bb *BuilderBidDeneb) ToProto() (*eth.BuilderBidDeneb, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var bundle *v1.BlindedBlobsBundle
|
||||
if bb.BlindedBlobsBundle != nil {
|
||||
bundle, err = bb.BlindedBlobsBundle.ToProto()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
if len(bb.BlobKzgCommitments) > fieldparams.MaxBlobsPerBlock {
|
||||
return nil, fmt.Errorf("too many blob commitments: %d", len(bb.BlobKzgCommitments))
|
||||
}
|
||||
kzgCommitments := make([][]byte, len(bb.BlobKzgCommitments))
|
||||
for i, commit := range bb.BlobKzgCommitments {
|
||||
if len(commit) != fieldparams.BLSPubkeyLength {
|
||||
return nil, fmt.Errorf("commitment length %d is not %d", len(commit), fieldparams.BLSPubkeyLength)
|
||||
}
|
||||
kzgCommitments[i] = bytesutil.SafeCopyBytes(commit)
|
||||
}
|
||||
return ð.BuilderBidDeneb{
|
||||
Header: header,
|
||||
BlindedBlobsBundle: bundle,
|
||||
BlobKzgCommitments: kzgCommitments,
|
||||
Value: bytesutil.SafeCopyBytes(bb.Value.SSZBytes()),
|
||||
Pubkey: bytesutil.SafeCopyBytes(bb.Pubkey),
|
||||
}, nil
|
||||
@@ -887,42 +929,11 @@ func (bb *BuilderBidDeneb) ToProto() (*eth.BuilderBidDeneb, error) {
|
||||
// BuilderBidDeneb is a field of ExecHeaderResponseDeneb.
|
||||
type BuilderBidDeneb struct {
|
||||
Header *ExecutionPayloadHeaderDeneb `json:"header"`
|
||||
BlindedBlobsBundle *BlindedBlobsBundle `json:"blinded_blobs_bundle"`
|
||||
BlobKzgCommitments []hexutil.Bytes `json:"blob_kzg_commitments"`
|
||||
Value Uint256 `json:"value"`
|
||||
Pubkey hexutil.Bytes `json:"pubkey"`
|
||||
}
|
||||
|
||||
// BlindedBlobsBundle is a field of BuilderBidDeneb and represents the blinded blobs of the associated header.
|
||||
type BlindedBlobsBundle struct {
|
||||
KzgCommitments []hexutil.Bytes `json:"commitments"`
|
||||
Proofs []hexutil.Bytes `json:"proofs"`
|
||||
BlobRoots []hexutil.Bytes `json:"blob_roots"`
|
||||
}
|
||||
|
||||
// ToProto creates a BlindedBlobsBundle Proto from BlindedBlobsBundle.
|
||||
func (r *BlindedBlobsBundle) ToProto() (*v1.BlindedBlobsBundle, error) {
|
||||
kzg := make([][]byte, len(r.KzgCommitments))
|
||||
for i := range kzg {
|
||||
kzg[i] = bytesutil.SafeCopyBytes(r.KzgCommitments[i])
|
||||
}
|
||||
|
||||
proofs := make([][]byte, len(r.Proofs))
|
||||
for i := range proofs {
|
||||
proofs[i] = bytesutil.SafeCopyBytes(r.Proofs[i])
|
||||
}
|
||||
|
||||
blobRoots := make([][]byte, len(r.BlobRoots))
|
||||
for i := range blobRoots {
|
||||
blobRoots[i] = bytesutil.SafeCopyBytes(r.BlobRoots[i])
|
||||
}
|
||||
|
||||
return &v1.BlindedBlobsBundle{
|
||||
KzgCommitments: kzg,
|
||||
Proofs: proofs,
|
||||
BlobRoots: blobRoots,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// ExecutionPayloadHeaderDeneb is a field of the BuilderBidDeneb.
|
||||
type ExecutionPayloadHeaderDeneb struct {
|
||||
ParentHash hexutil.Bytes `json:"parent_hash"`
|
||||
@@ -1052,6 +1063,16 @@ type BlobsBundle struct {
|
||||
|
||||
// ToProto returns a BlobsBundle Proto.
|
||||
func (b BlobsBundle) ToProto() (*v1.BlobsBundle, error) {
|
||||
if len(b.Blobs) > fieldparams.MaxBlobCommitmentsPerBlock {
|
||||
return nil, fmt.Errorf("blobs length %d is more than max %d", len(b.Blobs), fieldparams.MaxBlobCommitmentsPerBlock)
|
||||
}
|
||||
if len(b.Commitments) != len(b.Blobs) {
|
||||
return nil, fmt.Errorf("commitments length %d does not equal blobs length %d", len(b.Commitments), len(b.Blobs))
|
||||
}
|
||||
if len(b.Proofs) != len(b.Blobs) {
|
||||
return nil, fmt.Errorf("proofs length %d does not equal blobs length %d", len(b.Proofs), len(b.Blobs))
|
||||
}
|
||||
|
||||
commitments := make([][]byte, len(b.Commitments))
|
||||
for i := range b.Commitments {
|
||||
if len(b.Commitments[i]) != fieldparams.BLSPubkeyLength {
|
||||
@@ -1066,9 +1087,6 @@ func (b BlobsBundle) ToProto() (*v1.BlobsBundle, error) {
|
||||
}
|
||||
proofs[i] = bytesutil.SafeCopyBytes(b.Proofs[i])
|
||||
}
|
||||
if len(b.Blobs) > fieldparams.MaxBlobsPerBlock {
|
||||
return nil, fmt.Errorf("blobs length %d is more than max %d", len(b.Blobs), fieldparams.MaxBlobsPerBlock)
|
||||
}
|
||||
blobs := make([][]byte, len(b.Blobs))
|
||||
for i := range b.Blobs {
|
||||
if len(b.Blobs[i]) != fieldparams.BlobLength {
|
||||
@@ -1083,6 +1101,28 @@ func (b BlobsBundle) ToProto() (*v1.BlobsBundle, error) {
|
||||
}, nil
|
||||
}
|
||||
|
||||
// FromBundleProto converts the proto bundle type to the builder
|
||||
// type.
|
||||
func FromBundleProto(bundle *v1.BlobsBundle) *BlobsBundle {
|
||||
commitments := make([]hexutil.Bytes, len(bundle.KzgCommitments))
|
||||
for i := range bundle.KzgCommitments {
|
||||
commitments[i] = bytesutil.SafeCopyBytes(bundle.KzgCommitments[i])
|
||||
}
|
||||
proofs := make([]hexutil.Bytes, len(bundle.Proofs))
|
||||
for i := range bundle.Proofs {
|
||||
proofs[i] = bytesutil.SafeCopyBytes(bundle.Proofs[i])
|
||||
}
|
||||
blobs := make([]hexutil.Bytes, len(bundle.Blobs))
|
||||
for i := range bundle.Blobs {
|
||||
blobs[i] = bytesutil.SafeCopyBytes(bundle.Blobs[i])
|
||||
}
|
||||
return &BlobsBundle{
|
||||
Commitments: commitments,
|
||||
Proofs: proofs,
|
||||
Blobs: blobs,
|
||||
}
|
||||
}
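A hedged round-trip sketch (not part of the diff): FromBundleProto above and BlobsBundle.ToProto earlier in the file should be inverses for well-formed input; protoBundle is assumed to be a valid *v1.BlobsBundle:

jsonBundle := builder.FromBundleProto(protoBundle)
roundTripped, err := jsonBundle.ToProto()
if err != nil {
	// ToProto re-validates lengths: commitments and proofs must match the blob count
}
_ = roundTripped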
|
||||
|
||||
// ToProto returns ExecutionPayloadDeneb Proto and BlobsBundle Proto separately.
|
||||
func (r *ExecPayloadResponseDeneb) ToProto() (*v1.ExecutionPayloadDeneb, *v1.BlobsBundle, error) {
|
||||
if r.Data == nil {
|
||||
|
||||
@@ -13,13 +13,13 @@ import (
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/prysmaticlabs/go-bitfield"
|
||||
"github.com/prysmaticlabs/prysm/v4/beacon-chain/rpc/eth/shared"
|
||||
fieldparams "github.com/prysmaticlabs/prysm/v4/config/fieldparams"
|
||||
"github.com/prysmaticlabs/prysm/v4/math"
|
||||
v1 "github.com/prysmaticlabs/prysm/v4/proto/engine/v1"
|
||||
eth "github.com/prysmaticlabs/prysm/v4/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server/structs"
|
||||
fieldparams "github.com/prysmaticlabs/prysm/v5/config/fieldparams"
|
||||
"github.com/prysmaticlabs/prysm/v5/math"
|
||||
v1 "github.com/prysmaticlabs/prysm/v5/proto/engine/v1"
|
||||
eth "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
)
|
||||
|
||||
func ezDecode(t *testing.T, s string) []byte {
|
||||
@@ -38,8 +38,7 @@ func TestSignedValidatorRegistration_MarshalJSON(t *testing.T) {
|
||||
},
|
||||
Signature: make([]byte, 96),
|
||||
}
|
||||
a, err := shared.SignedValidatorRegistrationFromConsensus(svr)
|
||||
require.NoError(t, err)
|
||||
a := structs.SignedValidatorRegistrationFromConsensus(svr)
|
||||
je, err := json.Marshal(a)
|
||||
require.NoError(t, err)
|
||||
// decode with a struct w/ plain strings so we can check the string encoding of the hex fields
|
||||
@@ -56,7 +55,7 @@ func TestSignedValidatorRegistration_MarshalJSON(t *testing.T) {
|
||||
require.Equal(t, "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", un.Message.Pubkey)
|
||||
|
||||
t.Run("roundtrip", func(t *testing.T) {
|
||||
b := &shared.SignedValidatorRegistration{}
|
||||
b := &structs.SignedValidatorRegistration{}
|
||||
if err := json.Unmarshal(je, b); err != nil {
|
||||
require.NoError(t, err)
|
||||
}
|
||||
@@ -142,17 +141,9 @@ var testExampleHeaderResponseDeneb = `{
|
||||
"blob_gas_used": "1",
|
||||
"excess_blob_gas": "2"
|
||||
},
|
||||
"blinded_blobs_bundle": {
|
||||
"commitments": [
|
||||
"blob_kzg_commitments": [
|
||||
"0x8dab030c51e16e84be9caab84ee3d0b8bbec1db4a0e4de76439da8424d9b957370a10a78851f97e4b54d2ce1ab0d686f"
|
||||
],
|
||||
"proofs": [
|
||||
"0xb4021b0de10f743893d4f71e1bf830c019e832958efd6795baf2f83b8699a9eccc5dc99015d8d4d8ec370d0cc333c06a"
|
||||
],
|
||||
"blob_roots": [
|
||||
"0x24564723180fcb3d994104538d351c8dcbde12d541676bb736cf678018ca4739"
|
||||
]
|
||||
},
|
||||
],
|
||||
"value": "652312848583266388373324160190187140051835877600158453279131187530910662656",
|
||||
"pubkey": "0x93247f2209abcacf57b75a51dafae777f9dd38bc7053d1af526f220a7489a6d3a2753e5f3e8b1cfe39b56f43611df74a"
|
||||
},
|
||||
@@ -218,6 +209,39 @@ var testExampleHeaderResponseUnknownVersion = `{
|
||||
}
|
||||
}`
|
||||
|
||||
var testExampleHeaderResponseDenebTooManyBlobs = `{
|
||||
"version": "deneb",
|
||||
"data": {
|
||||
"message": {
|
||||
"header": {
|
||||
"parent_hash": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"fee_recipient": "0xabcf8e0d4e9587369b2301d0790347320302cc09",
|
||||
"state_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"receipts_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"logs_bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"prev_randao": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"block_number": "1",
|
||||
"gas_limit": "1",
|
||||
"gas_used": "1",
|
||||
"timestamp": "1",
|
||||
"extra_data": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"base_fee_per_gas": "452312848583266388373324160190187140051835877600158453279131187530910662656",
|
||||
"block_hash": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"transactions_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"withdrawals_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"blob_gas_used": "1",
|
||||
"excess_blob_gas": "2"
|
||||
},
|
||||
"blob_kzg_commitments": [
|
||||
"","","","","","",""
|
||||
],
|
||||
"value": "652312848583266388373324160190187140051835877600158453279131187530910662656",
|
||||
"pubkey": "0x93247f2209abcacf57b75a51dafae777f9dd38bc7053d1af526f220a7489a6d3a2753e5f3e8b1cfe39b56f43611df74a"
|
||||
},
|
||||
"signature": "0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505"
|
||||
}
|
||||
}`
|
||||
|
||||
func TestExecutionHeaderResponseUnmarshal(t *testing.T) {
|
||||
hr := &ExecHeaderResponse{}
|
||||
require.NoError(t, json.Unmarshal([]byte(testExampleHeaderResponse), hr))
|
||||
@@ -637,6 +661,151 @@ var testExampleExecutionPayloadDeneb = fmt.Sprintf(`{
|
||||
}
|
||||
}`, hexutil.Encode(make([]byte, fieldparams.BlobLength)))
|
||||
|
||||
var testExampleExecutionPayloadDenebTooManyBlobs = fmt.Sprintf(`{
|
||||
"version": "deneb",
|
||||
"data": {
|
||||
"execution_payload":{
|
||||
"parent_hash": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"fee_recipient": "0xabcf8e0d4e9587369b2301d0790347320302cc09",
|
||||
"state_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"receipts_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"logs_bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"prev_randao": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"block_number": "1",
|
||||
"gas_limit": "1",
|
||||
"gas_used": "1",
|
||||
"timestamp": "1",
|
||||
"extra_data": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"base_fee_per_gas": "452312848583266388373324160190187140051835877600158453279131187530910662656",
|
||||
"block_hash": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"transactions": [
|
||||
"0x02f878831469668303f51d843b9ac9f9843b9aca0082520894c93269b73096998db66be0441e836d873535cb9c8894a19041886f000080c001a031cc29234036afbf9a1fb9476b463367cb1f957ac0b919b69bbc798436e604aaa018c4e9c3914eb27aadd0b91e10b18655739fcf8c1fc398763a9f1beecb8ddc86"
|
||||
],
|
||||
"withdrawals": [
|
||||
{
|
||||
"index": "1",
|
||||
"validator_index": "1",
|
||||
"address": "0xcf8e0d4e9587369b2301d0790347320302cc0943",
|
||||
"amount": "1"
|
||||
}
|
||||
],
|
||||
"blob_gas_used": "2",
|
||||
"excess_blob_gas": "3"
|
||||
},
|
||||
"blobs_bundle": {
|
||||
"commitments": [
|
||||
"0x8dab030c51e16e84be9caab84ee3d0b8bbec1db4a0e4de76439da8424d9b957370a10a78851f97e4b54d2ce1ab0d686f"
|
||||
],
|
||||
"proofs": [
|
||||
"0xb4021b0de10f743893d4f71e1bf830c019e832958efd6795baf2f83b8699a9eccc5dc99015d8d4d8ec370d0cc333c06a"
|
||||
],
|
||||
"blobs": %s
|
||||
}
|
||||
}
|
||||
}`, beyondMaxEmptyBlobs())
|
||||
|
||||
func beyondMaxEmptyBlobs() string {
|
||||
moreThanMax := fieldparams.MaxBlobCommitmentsPerBlock + 2
|
||||
blobs := make([]string, moreThanMax)
|
||||
b, err := json.Marshal(blobs)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return string(b)
|
||||
}
|
||||
|
||||
var testExampleExecutionPayloadDenebDifferentCommitmentCount = fmt.Sprintf(`{
|
||||
"version": "deneb",
|
||||
"data": {
|
||||
"execution_payload":{
|
||||
"parent_hash": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"fee_recipient": "0xabcf8e0d4e9587369b2301d0790347320302cc09",
|
||||
"state_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"receipts_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"logs_bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"prev_randao": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"block_number": "1",
|
||||
"gas_limit": "1",
|
||||
"gas_used": "1",
|
||||
"timestamp": "1",
|
||||
"extra_data": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"base_fee_per_gas": "452312848583266388373324160190187140051835877600158453279131187530910662656",
|
||||
"block_hash": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"transactions": [
|
||||
"0x02f878831469668303f51d843b9ac9f9843b9aca0082520894c93269b73096998db66be0441e836d873535cb9c8894a19041886f000080c001a031cc29234036afbf9a1fb9476b463367cb1f957ac0b919b69bbc798436e604aaa018c4e9c3914eb27aadd0b91e10b18655739fcf8c1fc398763a9f1beecb8ddc86"
|
||||
],
|
||||
"withdrawals": [
|
||||
{
|
||||
"index": "1",
|
||||
"validator_index": "1",
|
||||
"address": "0xcf8e0d4e9587369b2301d0790347320302cc0943",
|
||||
"amount": "1"
|
||||
}
|
||||
],
|
||||
"blob_gas_used": "2",
|
||||
"excess_blob_gas": "3"
|
||||
},
|
||||
"blobs_bundle": {
|
||||
"commitments": [
|
||||
"0x8dab030c51e16e84be9caab84ee3d0b8bbec1db4a0e4de76439da8424d9b957370a10a78851f97e4b54d2ce1ab0d686f",
|
||||
"0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
|
||||
],
|
||||
"proofs": [
|
||||
"0xb4021b0de10f743893d4f71e1bf830c019e832958efd6795baf2f83b8699a9eccc5dc99015d8d4d8ec370d0cc333c06a"
|
||||
],
|
||||
"blobs": [
|
||||
"%s"
|
||||
]
|
||||
}
|
||||
}
|
||||
}`, hexutil.Encode(make([]byte, fieldparams.BlobLength)))
|
||||
|
||||
var testExampleExecutionPayloadDenebDifferentProofCount = fmt.Sprintf(`{
|
||||
"version": "deneb",
|
||||
"data": {
|
||||
"execution_payload":{
|
||||
"parent_hash": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"fee_recipient": "0xabcf8e0d4e9587369b2301d0790347320302cc09",
|
||||
"state_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"receipts_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"logs_bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"prev_randao": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"block_number": "1",
|
||||
"gas_limit": "1",
|
||||
"gas_used": "1",
|
||||
"timestamp": "1",
|
||||
"extra_data": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"base_fee_per_gas": "452312848583266388373324160190187140051835877600158453279131187530910662656",
|
||||
"block_hash": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2",
|
||||
"transactions": [
|
||||
"0x02f878831469668303f51d843b9ac9f9843b9aca0082520894c93269b73096998db66be0441e836d873535cb9c8894a19041886f000080c001a031cc29234036afbf9a1fb9476b463367cb1f957ac0b919b69bbc798436e604aaa018c4e9c3914eb27aadd0b91e10b18655739fcf8c1fc398763a9f1beecb8ddc86"
|
||||
],
|
||||
"withdrawals": [
|
||||
{
|
||||
"index": "1",
|
||||
"validator_index": "1",
|
||||
"address": "0xcf8e0d4e9587369b2301d0790347320302cc0943",
|
||||
"amount": "1"
|
||||
}
|
||||
],
|
||||
"blob_gas_used": "2",
|
||||
"excess_blob_gas": "3"
|
||||
},
|
||||
"blobs_bundle": {
|
||||
"commitments": [
|
||||
"0x8dab030c51e16e84be9caab84ee3d0b8bbec1db4a0e4de76439da8424d9b957370a10a78851f97e4b54d2ce1ab0d686f"
|
||||
],
|
||||
"proofs": [
|
||||
"0xb4021b0de10f743893d4f71e1bf830c019e832958efd6795baf2f83b8699a9eccc5dc99015d8d4d8ec370d0cc333c06a",
|
||||
"0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
|
||||
],
|
||||
"blobs": [
|
||||
"%s"
|
||||
]
|
||||
}
|
||||
}
|
||||
}`, hexutil.Encode(make([]byte, fieldparams.BlobLength)))
|
||||
|
||||
func TestExecutionPayloadResponseUnmarshal(t *testing.T) {
|
||||
epr := &ExecPayloadResponse{}
|
||||
require.NoError(t, json.Unmarshal([]byte(testExampleExecutionPayload), epr))
|
||||
@@ -1013,7 +1182,6 @@ func TestExecutionPayloadResponseCapellaToProto(t *testing.T) {
|
||||
},
|
||||
}
|
||||
require.DeepEqual(t, expected, p)
|
||||
|
||||
}
|
||||
|
||||
func TestExecutionPayloadResponseDenebToProto(t *testing.T) {
|
||||
@@ -1092,7 +1260,27 @@ func TestExecutionPayloadResponseDenebToProto(t *testing.T) {
|
||||
}
|
||||
|
||||
require.DeepEqual(t, blobsBundle, expectedBlobs)
|
||||
}
|
||||
|
||||
func TestExecutionPayloadResponseDenebToProtoInvalidBlobCount(t *testing.T) {
|
||||
hr := &ExecPayloadResponseDeneb{}
|
||||
require.NoError(t, json.Unmarshal([]byte(testExampleExecutionPayloadDenebTooManyBlobs), hr))
|
||||
_, _, err := hr.ToProto()
|
||||
require.ErrorContains(t, fmt.Sprintf("blobs length %d is more than max %d", fieldparams.MaxBlobCommitmentsPerBlock+2, fieldparams.MaxBlobCommitmentsPerBlock), err)
|
||||
}
|
||||
|
||||
func TestExecutionPayloadResponseDenebToProtoDifferentCommitmentCount(t *testing.T) {
|
||||
hr := &ExecPayloadResponseDeneb{}
|
||||
require.NoError(t, json.Unmarshal([]byte(testExampleExecutionPayloadDenebDifferentCommitmentCount), hr))
|
||||
_, _, err := hr.ToProto()
|
||||
require.ErrorContains(t, "commitments length 2 does not equal blobs length 1", err)
|
||||
}
|
||||
|
||||
func TestExecutionPayloadResponseDenebToProtoDifferentProofCount(t *testing.T) {
|
||||
hr := &ExecPayloadResponseDeneb{}
|
||||
require.NoError(t, json.Unmarshal([]byte(testExampleExecutionPayloadDenebDifferentProofCount), hr))
|
||||
_, _, err := hr.ToProto()
|
||||
require.ErrorContains(t, "proofs length 2 does not equal blobs length 1", err)
|
||||
}
|
||||
|
||||
func pbEth1Data() *eth.Eth1Data {
|
||||
@@ -1530,7 +1718,7 @@ func TestUint256UnmarshalTooBig(t *testing.T) {
|
||||
func TestMarshalBlindedBeaconBlockBodyBellatrix(t *testing.T) {
|
||||
expected, err := os.ReadFile("testdata/blinded-block.json")
|
||||
require.NoError(t, err)
|
||||
b, err := shared.BlindedBeaconBlockBellatrixFromConsensus(ð.BlindedBeaconBlockBellatrix{
|
||||
b, err := structs.BlindedBeaconBlockBellatrixFromConsensus(ð.BlindedBeaconBlockBellatrix{
|
||||
Slot: 1,
|
||||
ProposerIndex: 1,
|
||||
ParentRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
@@ -1560,7 +1748,7 @@ func TestMarshalBlindedBeaconBlockBodyBellatrix(t *testing.T) {
|
||||
func TestMarshalBlindedBeaconBlockBodyCapella(t *testing.T) {
|
||||
expected, err := os.ReadFile("testdata/blinded-block-capella.json")
|
||||
require.NoError(t, err)
|
||||
b, err := shared.BlindedBeaconBlockCapellaFromConsensus(ð.BlindedBeaconBlockCapella{
|
||||
b, err := structs.BlindedBeaconBlockCapellaFromConsensus(ð.BlindedBeaconBlockCapella{
|
||||
Slot: 1,
|
||||
ProposerIndex: 1,
|
||||
ParentRoot: ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"),
|
||||
|
||||
@@ -4,7 +4,7 @@ import (
|
||||
"net/url"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
)
|
||||
|
||||
func TestValidHostname(t *testing.T) {
|
||||
|
||||
@@ -21,6 +21,9 @@ var ErrNotFound = errors.Wrap(ErrNotOK, "recv 404 NotFound response from API")
|
||||
// ErrInvalidNodeVersion indicates that the /eth/v1/node/version API response format was not recognized.
|
||||
var ErrInvalidNodeVersion = errors.New("invalid node version response")
|
||||
|
||||
// ErrConnectionIssue represents a connection problem.
|
||||
var ErrConnectionIssue = errors.New("could not connect")
|
||||
|
||||
// Non200Err is a function that parses an HTTP response to handle responses that are not 200 with a formatted error.
|
||||
func Non200Err(response *http.Response) error {
|
||||
bodyBytes, err := io.ReadAll(response.Body)
|
||||
@@ -32,7 +35,7 @@ func Non200Err(response *http.Response) error {
|
||||
}
|
||||
msg := fmt.Sprintf("code=%d, url=%s, body=%s", response.StatusCode, response.Request.URL, body)
|
||||
switch response.StatusCode {
|
||||
case 404:
|
||||
case http.StatusNotFound:
|
||||
return errors.Wrap(ErrNotFound, msg)
|
||||
default:
|
||||
return errors.Wrap(ErrNotOK, msg)
|
||||
|
||||
@@ -2,25 +2,23 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"endpoint_factory.go",
|
||||
"structs.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v4/validator/rpc/apimiddleware",
|
||||
srcs = ["event_stream.go"],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/client/event",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//api/gateway/apimiddleware:go_default_library",
|
||||
"//api:go_default_library",
|
||||
"//api/client:go_default_library",
|
||||
"@com_github_pkg_errors//:go_default_library",
|
||||
"@com_github_sirupsen_logrus//:go_default_library",
|
||||
],
|
||||
)
|
||||
|
||||
go_test(
|
||||
name = "go_default_test",
|
||||
srcs = ["structs_test.go"],
|
||||
srcs = ["event_stream_test.go"],
|
||||
embed = [":go_default_library"],
|
||||
deps = [
|
||||
"//config/fieldparams:go_default_library",
|
||||
"//proto/eth/service:go_default_library",
|
||||
"//testing/require:go_default_library",
|
||||
"@com_github_sirupsen_logrus//:go_default_library",
|
||||
],
|
||||
)
|
||||
148
api/client/event/event_stream.go
Normal file
@@ -0,0 +1,148 @@
|
||||
package event
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v5/api"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/client"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
const (
|
||||
EventHead = "head"
|
||||
EventBlock = "block"
|
||||
EventAttestation = "attestation"
|
||||
EventVoluntaryExit = "voluntary_exit"
|
||||
EventBlsToExecutionChange = "bls_to_execution_change"
|
||||
EventProposerSlashing = "proposer_slashing"
|
||||
EventAttesterSlashing = "attester_slashing"
|
||||
EventFinalizedCheckpoint = "finalized_checkpoint"
|
||||
EventChainReorg = "chain_reorg"
|
||||
EventContributionAndProof = "contribution_and_proof"
|
||||
EventLightClientFinalityUpdate = "light_client_finality_update"
|
||||
EventLightClientOptimisticUpdate = "light_client_optimistic_update"
|
||||
EventPayloadAttributes = "payload_attributes"
|
||||
EventBlobSidecar = "blob_sidecar"
|
||||
EventError = "error"
|
||||
EventConnectionError = "connection_error"
|
||||
)
|
||||
|
||||
var (
|
||||
_ = EventStreamClient(&EventStream{})
|
||||
)
|
||||
|
||||
var DefaultEventTopics = []string{EventHead}
|
||||
|
||||
type EventStreamClient interface {
|
||||
Subscribe(eventsChannel chan<- *Event)
|
||||
}
|
||||
|
||||
type Event struct {
|
||||
EventType string
|
||||
Data []byte
|
||||
}
|
||||
|
||||
// EventStream is responsible for subscribing to the Beacon API events endpoint
|
||||
// and dispatching received events to subscribers.
|
||||
type EventStream struct {
|
||||
ctx context.Context
|
||||
httpClient *http.Client
|
||||
host string
|
||||
topics []string
|
||||
}
|
||||
|
||||
func NewEventStream(ctx context.Context, httpClient *http.Client, host string, topics []string) (*EventStream, error) {
|
||||
// Check if the host is a valid URL
|
||||
_, err := url.ParseRequestURI(host)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(topics) == 0 {
|
||||
return nil, errors.New("no topics provided")
|
||||
}
|
||||
|
||||
return &EventStream{
|
||||
ctx: ctx,
|
||||
httpClient: httpClient,
|
||||
host: host,
|
||||
topics: topics,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (h *EventStream) Subscribe(eventsChannel chan<- *Event) {
|
||||
allTopics := strings.Join(h.topics, ",")
|
||||
log.WithField("topics", allTopics).Info("Listening to Beacon API events")
|
||||
fullUrl := h.host + "/eth/v1/events?topics=" + allTopics
|
||||
req, err := http.NewRequestWithContext(h.ctx, http.MethodGet, fullUrl, nil)
|
||||
if err != nil {
|
||||
eventsChannel <- &Event{
|
||||
EventType: EventConnectionError,
|
||||
Data: []byte(errors.Wrap(err, "failed to create HTTP request").Error()),
|
||||
}
|
||||
}
|
||||
req.Header.Set("Accept", api.EventStreamMediaType)
|
||||
req.Header.Set("Connection", api.KeepAlive)
|
||||
resp, err := h.httpClient.Do(req)
|
||||
if err != nil {
|
||||
eventsChannel <- &Event{
|
||||
EventType: EventConnectionError,
|
||||
Data: []byte(errors.Wrap(err, client.ErrConnectionIssue.Error()).Error()),
|
||||
}
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if closeErr := resp.Body.Close(); closeErr != nil {
|
||||
log.WithError(closeErr).Error("Failed to close events response body")
|
||||
}
|
||||
}()
|
||||
// Create a new scanner to read lines from the response body
|
||||
scanner := bufio.NewScanner(resp.Body)
|
||||
|
||||
var eventType, data string // Variables to store event type and data
|
||||
|
||||
// Iterate over lines of the event stream
|
||||
for scanner.Scan() {
|
||||
select {
|
||||
case <-h.ctx.Done():
|
||||
log.Info("Context canceled, stopping event stream")
|
||||
close(eventsChannel)
|
||||
return
|
||||
default:
|
||||
line := scanner.Text() // TODO(13730): scanner does not handle /r and does not fully adhere to https://html.spec.whatwg.org/multipage/server-sent-events.html#the-eventsource-interface
|
||||
// Handle the event based on your specific format
|
||||
if line == "" {
|
||||
// Empty line indicates the end of an event
|
||||
if eventType != "" && data != "" {
|
||||
// Process the event when both eventType and data are set
|
||||
eventsChannel <- &Event{EventType: eventType, Data: []byte(data)}
|
||||
}
|
||||
|
||||
// Reset eventType and data for the next event
|
||||
eventType, data = "", ""
|
||||
continue
|
||||
}
|
||||
et, ok := strings.CutPrefix(line, "event: ")
|
||||
if ok {
|
||||
// Extract event type from the "event" field
|
||||
eventType = et
|
||||
}
|
||||
d, ok := strings.CutPrefix(line, "data: ")
|
||||
if ok {
|
||||
// Extract data from the "data" field
|
||||
data = d
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
eventsChannel <- &Event{
|
||||
EventType: EventConnectionError,
|
||||
Data: []byte(errors.Wrap(err, errors.Wrap(client.ErrConnectionIssue, "scanner failed").Error()).Error()),
|
||||
}
|
||||
}
|
||||
}
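A hedged usage sketch (not part of the diff) for the EventStream added above; the host URL is assumed to point at a running beacon node with the events endpoint enabled:

stream, err := event.NewEventStream(ctx, http.DefaultClient, "http://localhost:3500", []string{event.EventHead})
if err != nil {
	// handle constructor error (invalid host URL or empty topic list)
}
ch := make(chan *event.Event, 1)
go stream.Subscribe(ch)
for ev := range ch { // the channel is closed when ctx is canceled
	log.WithField("type", ev.EventType).Info(string(ev.Data))
}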
|
||||
80
api/client/event/event_stream_test.go
Normal file
@@ -0,0 +1,80 @@
|
||||
package event

import (
    "context"
    "fmt"
    "net/http"
    "net/http/httptest"
    "testing"
    "time"

    "github.com/prysmaticlabs/prysm/v5/testing/require"
    log "github.com/sirupsen/logrus"
)

func TestNewEventStream(t *testing.T) {
    validURL := "http://localhost:8080"
    invalidURL := "://invalid"
    topics := []string{"topic1", "topic2"}

    tests := []struct {
        name    string
        host    string
        topics  []string
        wantErr bool
    }{
        {"Valid input", validURL, topics, false},
        {"Invalid URL", invalidURL, topics, true},
        {"No topics", validURL, []string{}, true},
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            _, err := NewEventStream(context.Background(), &http.Client{}, tt.host, tt.topics)
            if (err != nil) != tt.wantErr {
                t.Errorf("NewEventStream() error = %v, wantErr %v", err, tt.wantErr)
            }
        })
    }
}

func TestEventStream(t *testing.T) {
    mux := http.NewServeMux()
    mux.HandleFunc("/eth/v1/events", func(w http.ResponseWriter, r *http.Request) {
        flusher, ok := w.(http.Flusher)
        require.Equal(t, true, ok)
        for i := 1; i <= 2; i++ {
            _, err := fmt.Fprintf(w, "event: head\ndata: data%d\n\n", i)
            require.NoError(t, err)
            flusher.Flush()                    // Trigger a flush to simulate streaming data.
            time.Sleep(100 * time.Millisecond) // Simulate a delay between events.
        }
    })
    server := httptest.NewServer(mux)
    defer server.Close()

    topics := []string{"head"}
    eventsChannel := make(chan *Event, 1)
    stream, err := NewEventStream(context.Background(), http.DefaultClient, server.URL, topics)
    require.NoError(t, err)
    go stream.Subscribe(eventsChannel)

    // Collect events.
    var events []*Event
    for len(events) != 2 {
        event := <-eventsChannel
        log.Info(event)
        events = append(events, event)
    }

    // Verify the events' content.
    expectedData := []string{"data1", "data2"}
    for i, event := range events {
        if string(event.Data) != expectedData[i] {
            t.Errorf("Expected event data %q, got %q", expectedData[i], string(event.Data))
        }
    }
}
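The collection loop in TestEventStream blocks indefinitely if an expected event never arrives. As a hedged alternative, not part of this diff, a small helper like the sketch below could gather events with a deadline so a dropped event fails the test instead of hanging it; it assumes only the Event type and the testing/time imports already present in this file.

// collectEvents gathers n events from ch or fails the test after the given timeout.
func collectEvents(t *testing.T, ch <-chan *Event, n int, timeout time.Duration) []*Event {
    t.Helper()
    deadline := time.After(timeout)
    events := make([]*Event, 0, n)
    for len(events) < n {
        select {
        case ev := <-ch:
            events = append(events, ev)
        case <-deadline:
            t.Fatalf("timed out after %s waiting for %d events, got %d", timeout, n, len(events))
        }
    }
    return events
}

In the test above this would replace the collection loop with events := collectEvents(t, eventsChannel, 2, 5*time.Second).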
@@ -3,12 +3,11 @@ load("@prysm//tools/go:def.bzl", "go_library")
go_library(
    name = "go_default_library",
    srcs = ["client.go"],
    importpath = "github.com/prysmaticlabs/prysm/v4/api/client/validator",
    importpath = "github.com/prysmaticlabs/prysm/v5/api/client/validator",
    visibility = ["//visibility:public"],
    deps = [
        "//api/client:go_default_library",
        "//validator/rpc:go_default_library",
        "//validator/rpc/apimiddleware:go_default_library",
        "@com_github_pkg_errors//:go_default_library",
    ],
)
@@ -7,9 +7,8 @@ import (
    "strings"

    "github.com/pkg/errors"
    "github.com/prysmaticlabs/prysm/v4/api/client"
    "github.com/prysmaticlabs/prysm/v4/validator/rpc"
    "github.com/prysmaticlabs/prysm/v4/validator/rpc/apimiddleware"
    "github.com/prysmaticlabs/prysm/v5/api/client"
    "github.com/prysmaticlabs/prysm/v5/validator/rpc"
)

const (
@@ -42,14 +41,14 @@ func (c *Client) GetValidatorPubKeys(ctx context.Context) ([]string, error) {
    if err != nil {
        return nil, err
    }
    if len(jsonlocal.Keystores) == 0 && len(jsonremote.Data) == 0 {
    if len(jsonlocal.Data) == 0 && len(jsonremote.Data) == 0 {
        return nil, errors.New("there are no local keys or remote keys on the validator")
    }

    hexKeys := make(map[string]bool)

    for index := range jsonlocal.Keystores {
        hexKeys[jsonlocal.Keystores[index].ValidatingPubkey] = true
    for index := range jsonlocal.Data {
        hexKeys[jsonlocal.Data[index].ValidatingPubkey] = true
    }
    for index := range jsonremote.Data {
        hexKeys[jsonremote.Data[index].Pubkey] = true
@@ -62,12 +61,12 @@ func (c *Client) GetValidatorPubKeys(ctx context.Context) ([]string, error) {
}

// GetLocalValidatorKeys calls the keymanager APIs for local validator keys
func (c *Client) GetLocalValidatorKeys(ctx context.Context) (*apimiddleware.ListKeystoresResponseJson, error) {
func (c *Client) GetLocalValidatorKeys(ctx context.Context) (*rpc.ListKeystoresResponse, error) {
    localBytes, err := c.Get(ctx, localKeysPath, client.WithAuthorizationToken(c.Token()))
    if err != nil {
        return nil, err
    }
    jsonlocal := &apimiddleware.ListKeystoresResponseJson{}
    jsonlocal := &rpc.ListKeystoresResponse{}
    if err := json.Unmarshal(localBytes, jsonlocal); err != nil {
        return nil, errors.Wrap(err, "failed to parse local keystore list")
    }
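The updated GetLocalValidatorKeys above switches from the deleted apimiddleware container to rpc.ListKeystoresResponse, and GetValidatorPubKeys now reads jsonlocal.Data and jsonremote.Data. The sketch below shows the response shape that usage implies; the type and field names are inferred from the code and the standard keymanager API, not copied from validator/rpc, and the real structs may carry additional fields.

// Inferred sketch only; the authoritative definitions live in the validator/rpc package.
type ListKeystoresResponse struct {
    Data []*Keystore `json:"data"`
}

type Keystore struct {
    ValidatingPubkey string `json:"validating_pubkey"`
}

type ListRemoteKeysResponse struct {
    Data []*RemoteKey `json:"data"`
}

type RemoteKey struct {
    Pubkey string `json:"pubkey"`
}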
api/constants.go (new file, 7 lines)
@@ -0,0 +1,7 @@
package api

const (
    WebUrlPrefix        = "/v2/validator/"
    WebApiUrlPrefix     = "/api/v2/validator/"
    KeymanagerApiPrefix = "/eth/v1"
)
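These constants centralize the URL roots shared by the validator web and keymanager APIs. A small illustrative use follows; it is not taken from this diff, and the "/keystores" suffix is an assumption matching the standard keymanager routes.

package main

import (
    "fmt"

    "github.com/prysmaticlabs/prysm/v5/api"
)

func main() {
    // Compose a keymanager route from the shared prefix constant.
    fmt.Println(api.KeymanagerApiPrefix + "/keystores") // prints /eth/v1/keystores
}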
@@ -8,22 +8,22 @@ go_library(
        "modifiers.go",
        "options.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/v4/api/gateway",
    importpath = "github.com/prysmaticlabs/prysm/v5/api/gateway",
    visibility = [
        "//beacon-chain:__subpackages__",
        "//validator:__subpackages__",
    ],
    deps = [
        "//api/gateway/apimiddleware:go_default_library",
        "//api/server:go_default_library",
        "//runtime:go_default_library",
        "@com_github_gorilla_mux//:go_default_library",
        "@com_github_grpc_ecosystem_grpc_gateway_v2//runtime:go_default_library",
        "@com_github_pkg_errors//:go_default_library",
        "@com_github_rs_cors//:go_default_library",
        "@com_github_sirupsen_logrus//:go_default_library",
        "@org_golang_google_grpc//:go_default_library",
        "@org_golang_google_grpc//connectivity:go_default_library",
        "@org_golang_google_grpc//credentials:go_default_library",
        "@org_golang_google_grpc//credentials/insecure:go_default_library",
        "@org_golang_google_protobuf//proto:go_default_library",
    ],
)
@@ -33,7 +33,6 @@ go_test(
    srcs = ["gateway_test.go"],
    embed = [":go_default_library"],
    deps = [
        "//api/gateway/apimiddleware:go_default_library",
        "//cmd/beacon-chain/flags:go_default_library",
        "//testing/assert:go_default_library",
        "//testing/require:go_default_library",
@@ -1,43 +0,0 @@
load("@prysm//tools/go:def.bzl", "go_library", "go_test")

go_library(
    name = "go_default_library",
    srcs = [
        "api_middleware.go",
        "log.go",
        "param_handling.go",
        "process_field.go",
        "process_request.go",
        "structs.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/v4/api/gateway/apimiddleware",
    visibility = ["//visibility:public"],
    deps = [
        "//api:go_default_library",
        "//api/grpc:go_default_library",
        "//encoding/bytesutil:go_default_library",
        "@com_github_ethereum_go_ethereum//common:go_default_library",
        "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
        "@com_github_gorilla_mux//:go_default_library",
        "@com_github_pkg_errors//:go_default_library",
        "@com_github_sirupsen_logrus//:go_default_library",
        "@com_github_wealdtech_go_bytesutil//:go_default_library",
    ],
)

go_test(
    name = "go_default_test",
    srcs = [
        "param_handling_test.go",
        "process_request_test.go",
    ],
    embed = [":go_default_library"],
    deps = [
        "//api:go_default_library",
        "//api/grpc:go_default_library",
        "//testing/assert:go_default_library",
        "//testing/require:go_default_library",
        "@com_github_gorilla_mux//:go_default_library",
        "@com_github_sirupsen_logrus//hooks/test:go_default_library",
    ],
)
@@ -1,265 +0,0 @@
|
||||
package apimiddleware
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"reflect"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
// ApiProxyMiddleware is a proxy between an Ethereum consensus API HTTP client and grpc-gateway.
|
||||
// The purpose of the proxy is to handle HTTP requests and gRPC responses in such a way that:
|
||||
// - Ethereum consensus API requests can be handled by grpc-gateway correctly
|
||||
// - gRPC responses can be returned as spec-compliant Ethereum consensus API responses
|
||||
type ApiProxyMiddleware struct {
|
||||
GatewayAddress string
|
||||
EndpointCreator EndpointFactory
|
||||
Timeout time.Duration
|
||||
router *mux.Router
|
||||
}
|
||||
|
||||
// EndpointFactory is responsible for creating new instances of Endpoint values.
|
||||
type EndpointFactory interface {
|
||||
Create(path string) (*Endpoint, error)
|
||||
Paths() []string
|
||||
IsNil() bool
|
||||
}
|
||||
|
||||
// Endpoint is a representation of an API HTTP endpoint that should be proxied by the middleware.
|
||||
type Endpoint struct {
|
||||
Path string // The path of the HTTP endpoint.
|
||||
GetResponse interface{} // The struct corresponding to the JSON structure used in a GET response.
|
||||
PostRequest interface{} // The struct corresponding to the JSON structure used in a POST request.
|
||||
PostResponse interface{} // The struct corresponding to the JSON structure used in a POST response.
|
||||
DeleteRequest interface{} // The struct corresponding to the JSON structure used in a DELETE request.
|
||||
DeleteResponse interface{} // The struct corresponding to the JSON structure used in a DELETE response.
|
||||
RequestURLLiterals []string // Names of URL parameters that should not be base64-encoded.
|
||||
RequestQueryParams []QueryParam // Query parameters of the request.
|
||||
Err ErrorJson // The struct corresponding to the error that should be returned in case of a request failure.
|
||||
Hooks HookCollection // A collection of functions that can be invoked at various stages of the request/response cycle.
|
||||
CustomHandlers []CustomHandler // Functions that will be executed instead of the default request/response behaviour.
|
||||
}
|
||||
|
||||
// RunDefault expresses whether the default processing logic should be carried out after running a pre hook.
|
||||
type RunDefault bool
|
||||
|
||||
// DefaultEndpoint returns an Endpoint with default configuration, e.g. DefaultErrorJson for error handling.
|
||||
func DefaultEndpoint() Endpoint {
|
||||
return Endpoint{
|
||||
Err: &DefaultErrorJson{},
|
||||
}
|
||||
}
|
||||
|
||||
// QueryParam represents a single query parameter's metadata.
|
||||
type QueryParam struct {
|
||||
Name string
|
||||
Hex bool
|
||||
Enum bool
|
||||
}
|
||||
|
||||
// CustomHandler is a function that can be invoked at the very beginning of the request,
|
||||
// essentially replacing the whole default request/response logic with custom logic for a specific endpoint.
|
||||
type CustomHandler = func(m *ApiProxyMiddleware, endpoint Endpoint, w http.ResponseWriter, req *http.Request) (handled bool)
|
||||
|
||||
// HookCollection contains hooks that can be used to amend the default request/response cycle with custom logic for a specific endpoint.
|
||||
type HookCollection struct {
|
||||
OnPreDeserializeRequestBodyIntoContainer func(endpoint *Endpoint, w http.ResponseWriter, req *http.Request) (RunDefault, ErrorJson)
|
||||
OnPostDeserializeRequestBodyIntoContainer func(endpoint *Endpoint, w http.ResponseWriter, req *http.Request) ErrorJson
|
||||
OnPreDeserializeGrpcResponseBodyIntoContainer func([]byte, interface{}) (RunDefault, ErrorJson)
|
||||
OnPreSerializeMiddlewareResponseIntoJson func(interface{}) (RunDefault, []byte, ErrorJson)
|
||||
}
|
||||
|
||||
// fieldProcessor applies the processing function f to a value when the tag is present on the field.
|
||||
type fieldProcessor struct {
|
||||
tag string
|
||||
f func(value reflect.Value) error
|
||||
}
|
||||
|
||||
// Run starts the proxy, registering all proxy endpoints.
|
||||
func (m *ApiProxyMiddleware) Run(gatewayRouter *mux.Router) {
|
||||
for _, path := range m.EndpointCreator.Paths() {
|
||||
gatewayRouter.HandleFunc(path, m.WithMiddleware(path))
|
||||
}
|
||||
m.router = gatewayRouter
|
||||
}
|
||||
|
||||
// ServeHTTP for the proxy middleware.
|
||||
func (m *ApiProxyMiddleware) ServeHTTP(w http.ResponseWriter, req *http.Request) {
|
||||
m.router.ServeHTTP(w, req)
|
||||
}
|
||||
|
||||
// WithMiddleware wraps the given endpoint handler with the middleware logic.
|
||||
func (m *ApiProxyMiddleware) WithMiddleware(path string) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, req *http.Request) {
|
||||
endpoint, err := m.EndpointCreator.Create(path)
|
||||
if err != nil {
|
||||
log.WithError(err).Errorf("Could not create endpoint for path: %s", path)
|
||||
return
|
||||
}
|
||||
|
||||
for _, handler := range endpoint.CustomHandlers {
|
||||
if handler(m, *endpoint, w, req) {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if req.Method == "POST" {
|
||||
if errJson := handlePostRequestForEndpoint(endpoint, w, req); errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if req.Method == "DELETE" && req.Body != http.NoBody {
|
||||
if errJson := handleDeleteRequestForEndpoint(endpoint, req); errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if errJson := m.PrepareRequestForProxying(*endpoint, req); errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
grpcResp, errJson := m.ProxyRequest(req)
|
||||
if errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
grpcRespBody, errJson := ReadGrpcResponseBody(grpcResp.Body)
|
||||
if errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
|
||||
var respJson []byte
|
||||
if !GrpcResponseIsEmpty(grpcRespBody) {
|
||||
respHasError, errJson := HandleGrpcResponseError(endpoint.Err, grpcResp, grpcRespBody, w)
|
||||
if errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
if respHasError {
|
||||
return
|
||||
}
|
||||
|
||||
var resp interface{}
|
||||
if req.Method == "GET" {
|
||||
resp = endpoint.GetResponse
|
||||
} else if req.Method == "DELETE" {
|
||||
resp = endpoint.DeleteResponse
|
||||
} else {
|
||||
resp = endpoint.PostResponse
|
||||
}
|
||||
if errJson := deserializeGrpcResponseBodyIntoContainerWrapped(endpoint, grpcRespBody, resp); errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
if errJson := ProcessMiddlewareResponseFields(resp); errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
|
||||
respJson, errJson = serializeMiddlewareResponseIntoJsonWrapped(endpoint, respJson, resp)
|
||||
if errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if errJson := WriteMiddlewareResponseHeadersAndBody(grpcResp, respJson, w); errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
if errJson := Cleanup(grpcResp.Body); errJson != nil {
|
||||
WriteError(w, errJson, nil)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func handlePostRequestForEndpoint(endpoint *Endpoint, w http.ResponseWriter, req *http.Request) ErrorJson {
|
||||
if errJson := deserializeRequestBodyIntoContainerWrapped(endpoint, req, w); errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
if errJson := ProcessRequestContainerFields(endpoint.PostRequest); errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
return SetRequestBodyToRequestContainer(endpoint.PostRequest, req)
|
||||
}
|
||||
|
||||
func handleDeleteRequestForEndpoint(endpoint *Endpoint, req *http.Request) ErrorJson {
|
||||
if errJson := DeserializeRequestBodyIntoContainer(req.Body, endpoint.DeleteRequest); errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
if errJson := ProcessRequestContainerFields(endpoint.DeleteRequest); errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
return SetRequestBodyToRequestContainer(endpoint.DeleteRequest, req)
|
||||
}
|
||||
|
||||
func deserializeRequestBodyIntoContainerWrapped(endpoint *Endpoint, req *http.Request, w http.ResponseWriter) ErrorJson {
|
||||
runDefault := true
|
||||
if endpoint.Hooks.OnPreDeserializeRequestBodyIntoContainer != nil {
|
||||
run, errJson := endpoint.Hooks.OnPreDeserializeRequestBodyIntoContainer(endpoint, w, req)
|
||||
if errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
if !run {
|
||||
runDefault = false
|
||||
}
|
||||
}
|
||||
if runDefault {
|
||||
if errJson := DeserializeRequestBodyIntoContainer(req.Body, endpoint.PostRequest); errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
}
|
||||
if endpoint.Hooks.OnPostDeserializeRequestBodyIntoContainer != nil {
|
||||
if errJson := endpoint.Hooks.OnPostDeserializeRequestBodyIntoContainer(endpoint, w, req); errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func deserializeGrpcResponseBodyIntoContainerWrapped(endpoint *Endpoint, grpcResponseBody []byte, resp interface{}) ErrorJson {
|
||||
runDefault := true
|
||||
if endpoint.Hooks.OnPreDeserializeGrpcResponseBodyIntoContainer != nil {
|
||||
run, errJson := endpoint.Hooks.OnPreDeserializeGrpcResponseBodyIntoContainer(grpcResponseBody, resp)
|
||||
if errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
if !run {
|
||||
runDefault = false
|
||||
}
|
||||
}
|
||||
if runDefault {
|
||||
if errJson := DeserializeGrpcResponseBodyIntoContainer(grpcResponseBody, resp); errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func serializeMiddlewareResponseIntoJsonWrapped(endpoint *Endpoint, respJson []byte, resp interface{}) ([]byte, ErrorJson) {
|
||||
runDefault := true
|
||||
var errJson ErrorJson
|
||||
if endpoint.Hooks.OnPreSerializeMiddlewareResponseIntoJson != nil {
|
||||
var run RunDefault
|
||||
run, respJson, errJson = endpoint.Hooks.OnPreSerializeMiddlewareResponseIntoJson(resp)
|
||||
if errJson != nil {
|
||||
return nil, errJson
|
||||
}
|
||||
if !run {
|
||||
runDefault = false
|
||||
}
|
||||
}
|
||||
if runDefault {
|
||||
respJson, errJson = SerializeMiddlewareResponseIntoJson(resp)
|
||||
if errJson != nil {
|
||||
return nil, errJson
|
||||
}
|
||||
}
|
||||
return respJson, nil
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
package apimiddleware
|
||||
|
||||
import "github.com/sirupsen/logrus"
|
||||
|
||||
var log = logrus.WithField("prefix", "apimiddleware")
|
||||
@@ -1,103 +0,0 @@
|
||||
package apimiddleware
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
butil "github.com/prysmaticlabs/prysm/v4/encoding/bytesutil"
|
||||
"github.com/wealdtech/go-bytesutil"
|
||||
)
|
||||
|
||||
// HandleURLParameters processes URL parameters, allowing parameterized URLs to be safely and correctly proxied to grpc-gateway.
|
||||
func HandleURLParameters(url string, req *http.Request, literals []string) ErrorJson {
|
||||
segments := strings.Split(url, "/")
|
||||
|
||||
segmentsLoop:
|
||||
for i, s := range segments {
|
||||
// We only care about segments which are parameterized.
|
||||
if isRequestParam(s) {
|
||||
// Don't do anything with parameters which should be forwarded literally to gRPC.
|
||||
for _, l := range literals {
|
||||
if s == "{"+l+"}" {
|
||||
continue segmentsLoop
|
||||
}
|
||||
}
|
||||
|
||||
routeVar := mux.Vars(req)[s[1:len(s)-1]]
|
||||
bRouteVar := []byte(routeVar)
|
||||
if butil.IsHex(bRouteVar) {
|
||||
var err error
|
||||
bRouteVar, err = bytesutil.FromHexString(string(bRouteVar))
|
||||
if err != nil {
|
||||
return InternalServerErrorWithMessage(err, "could not process URL parameter")
|
||||
}
|
||||
}
|
||||
// Converting hex to base64 may result in a value which malforms the URL.
|
||||
// We use URLEncoding to safely escape such values.
|
||||
base64RouteVar := base64.URLEncoding.EncodeToString(bRouteVar)
|
||||
|
||||
// Merge segments back into the full URL.
|
||||
splitPath := strings.Split(req.URL.Path, "/")
|
||||
splitPath[i] = base64RouteVar
|
||||
req.URL.Path = strings.Join(splitPath, "/")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// HandleQueryParameters processes query parameters, allowing them to be safely and correctly proxied to grpc-gateway.
|
||||
func HandleQueryParameters(req *http.Request, params []QueryParam) ErrorJson {
|
||||
queryParams := req.URL.Query()
|
||||
|
||||
normalizeQueryValues(queryParams)
|
||||
|
||||
for key, vals := range queryParams {
|
||||
for _, p := range params {
|
||||
if key == p.Name {
|
||||
if p.Hex {
|
||||
queryParams.Del(key)
|
||||
for _, v := range vals {
|
||||
b := []byte(v)
|
||||
if butil.IsHex(b) {
|
||||
var err error
|
||||
b, err = bytesutil.FromHexString(v)
|
||||
if err != nil {
|
||||
return InternalServerErrorWithMessage(err, "could not process query parameter")
|
||||
}
|
||||
}
|
||||
queryParams.Add(key, base64.URLEncoding.EncodeToString(b))
|
||||
}
|
||||
}
|
||||
if p.Enum {
|
||||
queryParams.Del(key)
|
||||
for _, v := range vals {
|
||||
// gRPC expects uppercase enum values.
|
||||
queryParams.Add(key, strings.ToUpper(v))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
req.URL.RawQuery = queryParams.Encode()
|
||||
return nil
|
||||
}
|
||||
|
||||
// isRequestParam verifies whether the passed string is a request parameter.
|
||||
// Request parameters are enclosed in { and }.
|
||||
func isRequestParam(s string) bool {
|
||||
return len(s) > 2 && s[0] == '{' && s[len(s)-1] == '}'
|
||||
}
|
||||
|
||||
func normalizeQueryValues(queryParams url.Values) {
|
||||
// Replace comma-separated values with individual values.
|
||||
for key, vals := range queryParams {
|
||||
splitVals := make([]string, 0)
|
||||
for _, v := range vals {
|
||||
splitVals = append(splitVals, strings.Split(v, ",")...)
|
||||
}
|
||||
queryParams[key] = splitVals
|
||||
}
|
||||
}
|
||||
@@ -1,124 +0,0 @@
|
||||
package apimiddleware
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
)
|
||||
|
||||
func TestHandleURLParameters(t *testing.T) {
|
||||
var body bytes.Buffer
|
||||
|
||||
t.Run("no_params", func(t *testing.T) {
|
||||
request := httptest.NewRequest("GET", "http://foo.example/bar", &body)
|
||||
|
||||
errJson := HandleURLParameters("/not_param", request, []string{})
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, "/bar", request.URL.Path)
|
||||
})
|
||||
|
||||
t.Run("with_params", func(t *testing.T) {
|
||||
muxVars := make(map[string]string)
|
||||
muxVars["bar_param"] = "bar"
|
||||
muxVars["quux_param"] = "quux"
|
||||
request := httptest.NewRequest("GET", "http://foo.example/bar/baz/quux", &body)
|
||||
request = mux.SetURLVars(request, muxVars)
|
||||
|
||||
errJson := HandleURLParameters("/{bar_param}/not_param/{quux_param}", request, []string{})
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, "/YmFy/baz/cXV1eA==", request.URL.Path)
|
||||
})
|
||||
|
||||
t.Run("with_literal", func(t *testing.T) {
|
||||
muxVars := make(map[string]string)
|
||||
muxVars["bar_param"] = "bar"
|
||||
request := httptest.NewRequest("GET", "http://foo.example/bar/baz", &body)
|
||||
request = mux.SetURLVars(request, muxVars)
|
||||
|
||||
errJson := HandleURLParameters("/{bar_param}/not_param/", request, []string{"bar_param"})
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, "/bar/baz", request.URL.Path)
|
||||
})
|
||||
|
||||
t.Run("with_hex", func(t *testing.T) {
|
||||
muxVars := make(map[string]string)
|
||||
muxVars["hex_param"] = "0x626172"
|
||||
request := httptest.NewRequest("GET", "http://foo.example/0x626172/baz", &body)
|
||||
request = mux.SetURLVars(request, muxVars)
|
||||
|
||||
errJson := HandleURLParameters("/{hex_param}/not_param/", request, []string{})
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, "/YmFy/baz", request.URL.Path)
|
||||
})
|
||||
}
|
||||
|
||||
func TestHandleQueryParameters(t *testing.T) {
|
||||
var body bytes.Buffer
|
||||
|
||||
t.Run("regular_params", func(t *testing.T) {
|
||||
request := httptest.NewRequest("GET", "http://foo.example?bar=bar&baz=baz", &body)
|
||||
|
||||
errJson := HandleQueryParameters(request, []QueryParam{{Name: "bar"}, {Name: "baz"}})
|
||||
require.Equal(t, true, errJson == nil)
|
||||
query := request.URL.Query()
|
||||
v, ok := query["bar"]
|
||||
require.Equal(t, true, ok, "query param not found")
|
||||
require.Equal(t, 1, len(v), "wrong number of query param values")
|
||||
assert.Equal(t, "bar", v[0])
|
||||
v, ok = query["baz"]
|
||||
require.Equal(t, true, ok, "query param not found")
|
||||
require.Equal(t, 1, len(v), "wrong number of query param values")
|
||||
assert.Equal(t, "baz", v[0])
|
||||
})
|
||||
|
||||
t.Run("hex_and_enum_params", func(t *testing.T) {
|
||||
request := httptest.NewRequest("GET", "http://foo.example?hex=0x626172&baz=baz", &body)
|
||||
|
||||
errJson := HandleQueryParameters(request, []QueryParam{{Name: "hex", Hex: true}, {Name: "baz", Enum: true}})
|
||||
require.Equal(t, true, errJson == nil)
|
||||
query := request.URL.Query()
|
||||
v, ok := query["hex"]
|
||||
require.Equal(t, true, ok, "query param not found")
|
||||
require.Equal(t, 1, len(v), "wrong number of query param values")
|
||||
assert.Equal(t, "YmFy", v[0])
|
||||
v, ok = query["baz"]
|
||||
require.Equal(t, true, ok, "query param not found")
|
||||
require.Equal(t, 1, len(v), "wrong number of query param values")
|
||||
assert.Equal(t, "BAZ", v[0])
|
||||
})
|
||||
}
|
||||
|
||||
func TestIsRequestParam(t *testing.T) {
|
||||
tests := []struct {
|
||||
s string
|
||||
b bool
|
||||
}{
|
||||
{"", false},
|
||||
{"{", false},
|
||||
{"}", false},
|
||||
{"{}", false},
|
||||
{"{x}", true},
|
||||
{"{very_long_parameter_name_with_underscores}", true},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
b := isRequestParam(tt.s)
|
||||
assert.Equal(t, tt.b, b)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeQueryValues(t *testing.T) {
|
||||
input := make(map[string][]string)
|
||||
input["key"] = []string{"value1", "value2,value3,value4", "value5"}
|
||||
|
||||
normalizeQueryValues(input)
|
||||
require.Equal(t, 5, len(input["key"]))
|
||||
assert.Equal(t, "value1", input["key"][0])
|
||||
assert.Equal(t, "value2", input["key"][1])
|
||||
assert.Equal(t, "value3", input["key"][2])
|
||||
assert.Equal(t, "value4", input["key"][3])
|
||||
assert.Equal(t, "value5", input["key"][4])
|
||||
}
|
||||
@@ -1,179 +0,0 @@
|
||||
package apimiddleware
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common"
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/wealdtech/go-bytesutil"
|
||||
)
|
||||
|
||||
// processField calls each processor function on any field that has the matching tag set.
|
||||
// It is a recursive function.
|
||||
func processField(s interface{}, processors []fieldProcessor) error {
|
||||
kind := reflect.TypeOf(s).Kind()
|
||||
if kind != reflect.Ptr && kind != reflect.Slice && kind != reflect.Array {
|
||||
return fmt.Errorf("processing fields of kind '%v' is unsupported", kind)
|
||||
}
|
||||
|
||||
t := reflect.TypeOf(s).Elem()
|
||||
v := reflect.Indirect(reflect.ValueOf(s))
|
||||
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
switch v.Field(i).Kind() {
|
||||
case reflect.Slice:
|
||||
sliceElem := t.Field(i).Type.Elem()
|
||||
kind := sliceElem.Kind()
|
||||
// Recursively process slices to struct pointers.
|
||||
switch {
|
||||
case kind == reflect.Ptr && sliceElem.Elem().Kind() == reflect.Struct:
|
||||
for j := 0; j < v.Field(i).Len(); j++ {
|
||||
if err := processField(v.Field(i).Index(j).Interface(), processors); err != nil {
|
||||
return errors.Wrapf(err, "could not process field '%s'", t.Field(i).Name)
|
||||
}
|
||||
}
|
||||
// Process each string in string slices.
|
||||
case kind == reflect.String:
|
||||
for _, proc := range processors {
|
||||
_, hasTag := t.Field(i).Tag.Lookup(proc.tag)
|
||||
if !hasTag {
|
||||
continue
|
||||
}
|
||||
for j := 0; j < v.Field(i).Len(); j++ {
|
||||
if err := proc.f(v.Field(i).Index(j)); err != nil {
|
||||
return errors.Wrapf(err, "could not process field '%s'", t.Field(i).Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Recursively process struct pointers.
|
||||
case reflect.Ptr:
|
||||
if v.Field(i).Elem().Kind() == reflect.Struct {
|
||||
if err := processField(v.Field(i).Interface(), processors); err != nil {
|
||||
return errors.Wrapf(err, "could not process field '%s'", t.Field(i).Name)
|
||||
}
|
||||
}
|
||||
default:
|
||||
field := t.Field(i)
|
||||
for _, proc := range processors {
|
||||
if _, hasTag := field.Tag.Lookup(proc.tag); hasTag {
|
||||
if err := proc.f(v.Field(i)); err != nil {
|
||||
return errors.Wrapf(err, "could not process field '%s'", t.Field(i).Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func hexToBase64Processor(v reflect.Value) error {
|
||||
if v.String() == "0x" {
|
||||
v.SetString("")
|
||||
return nil
|
||||
}
|
||||
b, err := bytesutil.FromHexString(v.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.SetString(base64.StdEncoding.EncodeToString(b))
|
||||
return nil
|
||||
}
|
||||
|
||||
func base64ToHexProcessor(v reflect.Value) error {
|
||||
if v.String() == "" {
|
||||
// Empty hex values are represented as "0x".
|
||||
v.SetString("0x")
|
||||
return nil
|
||||
}
|
||||
b, err := base64.StdEncoding.DecodeString(v.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.SetString(hexutil.Encode(b))
|
||||
return nil
|
||||
}
|
||||
|
||||
func base64ToChecksumAddressProcessor(v reflect.Value) error {
|
||||
if v.String() == "" {
|
||||
// Empty hex values are represented as "0x".
|
||||
v.SetString("0x")
|
||||
return nil
|
||||
}
|
||||
b, err := base64.StdEncoding.DecodeString(v.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.SetString(common.BytesToAddress(b).Hex())
|
||||
return nil
|
||||
}
|
||||
|
||||
func base64ToUint256Processor(v reflect.Value) error {
|
||||
if v.String() == "" {
|
||||
return nil
|
||||
}
|
||||
littleEndian, err := base64.StdEncoding.DecodeString(v.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(littleEndian) != 32 {
|
||||
return errors.New("invalid length for Uint256")
|
||||
}
|
||||
|
||||
// Integers are stored as little-endian, but
|
||||
// big.Int expects big-endian. So we need to reverse
|
||||
// the byte order before decoding.
|
||||
var bigEndian [32]byte
|
||||
for i := 0; i < len(littleEndian); i++ {
|
||||
bigEndian[i] = littleEndian[len(littleEndian)-1-i]
|
||||
}
|
||||
var uint256 big.Int
|
||||
uint256.SetBytes(bigEndian[:])
|
||||
v.SetString(uint256.String())
|
||||
return nil
|
||||
}
|
||||
|
||||
func uint256ToBase64Processor(v reflect.Value) error {
|
||||
if v.String() == "" {
|
||||
return nil
|
||||
}
|
||||
uint256, ok := new(big.Int).SetString(v.String(), 10)
|
||||
if !ok {
|
||||
return fmt.Errorf("could not parse Uint256")
|
||||
}
|
||||
bigEndian := uint256.Bytes()
|
||||
if len(bigEndian) > 32 {
|
||||
return fmt.Errorf("number too big for Uint256")
|
||||
}
|
||||
|
||||
// Integers are stored as little-endian, but
|
||||
// big.Int gives big-endian. So we need to reverse
|
||||
// the byte order before encoding.
|
||||
var littleEndian [32]byte
|
||||
for i := 0; i < len(bigEndian); i++ {
|
||||
littleEndian[i] = bigEndian[len(bigEndian)-1-i]
|
||||
}
|
||||
v.SetString(base64.StdEncoding.EncodeToString(littleEndian[:]))
|
||||
return nil
|
||||
}
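The little-endian/big-endian note above is easiest to see with a concrete value. The sketch below is illustrative and not part of the diff; it reproduces the conversion for 4196, the value used by the tests further down, which encodes to the same base64 string as the TestUint256 constant there.

package main

import (
    "encoding/base64"
    "fmt"
    "math/big"
)

func main() {
    // 4196 = 0x1064. big.Int gives big-endian bytes [0x10, 0x64]; the processor stores
    // them little-endian in a 32-byte array: 0x64, 0x10, 0x00, ...
    bigEndian := new(big.Int).SetInt64(4196).Bytes()
    var littleEndian [32]byte
    for i := 0; i < len(bigEndian); i++ {
        littleEndian[i] = bigEndian[len(bigEndian)-1-i]
    }
    fmt.Println(base64.StdEncoding.EncodeToString(littleEndian[:]))
    // Output: ZBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
}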
|
||||
|
||||
func enumToLowercaseProcessor(v reflect.Value) error {
|
||||
v.SetString(strings.ToLower(v.String()))
|
||||
return nil
|
||||
}
|
||||
|
||||
func timeToUnixProcessor(v reflect.Value) error {
|
||||
t, err := time.Parse(time.RFC3339, v.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.SetString(strconv.FormatUint(uint64(t.Unix()), 10))
|
||||
return nil
|
||||
}
|
||||
@@ -1,283 +0,0 @@
|
||||
package apimiddleware
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v4/api"
|
||||
"github.com/prysmaticlabs/prysm/v4/api/grpc"
|
||||
)
|
||||
|
||||
// DeserializeRequestBodyIntoContainer deserializes the request's body into an endpoint-specific struct.
|
||||
func DeserializeRequestBodyIntoContainer(body io.Reader, requestContainer interface{}) ErrorJson {
|
||||
decoder := json.NewDecoder(body)
|
||||
decoder.DisallowUnknownFields()
|
||||
if err := decoder.Decode(&requestContainer); err != nil {
|
||||
if strings.Contains(err.Error(), "json: unknown field") {
|
||||
e := errors.Wrap(err, "could not decode request body")
|
||||
return &DefaultErrorJson{
|
||||
Message: e.Error(),
|
||||
Code: http.StatusBadRequest,
|
||||
}
|
||||
}
|
||||
return InternalServerErrorWithMessage(err, "could not decode request body")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ProcessRequestContainerFields processes fields of an endpoint-specific container according to field tags.
|
||||
func ProcessRequestContainerFields(requestContainer interface{}) ErrorJson {
|
||||
if err := processField(requestContainer, []fieldProcessor{
|
||||
{
|
||||
tag: "hex",
|
||||
f: hexToBase64Processor,
|
||||
},
|
||||
{
|
||||
tag: "uint256",
|
||||
f: uint256ToBase64Processor,
|
||||
},
|
||||
}); err != nil {
|
||||
return InternalServerErrorWithMessage(err, "could not process request data")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// SetRequestBodyToRequestContainer makes the endpoint-specific container the new body of the request.
|
||||
func SetRequestBodyToRequestContainer(requestContainer interface{}, req *http.Request) ErrorJson {
|
||||
// Serialize the struct, which now includes a base64-encoded value, into JSON.
|
||||
j, err := json.Marshal(requestContainer)
|
||||
if err != nil {
|
||||
return InternalServerErrorWithMessage(err, "could not marshal request")
|
||||
}
|
||||
// Set the body to the new JSON.
|
||||
req.Body = io.NopCloser(bytes.NewReader(j))
|
||||
req.Header.Set("Content-Length", strconv.Itoa(len(j)))
|
||||
req.ContentLength = int64(len(j))
|
||||
return nil
|
||||
}
|
||||
|
||||
// PrepareRequestForProxying applies additional logic to the request so that it can be correctly proxied to grpc-gateway.
|
||||
func (m *ApiProxyMiddleware) PrepareRequestForProxying(endpoint Endpoint, req *http.Request) ErrorJson {
|
||||
req.URL.Scheme = "http"
|
||||
req.URL.Host = m.GatewayAddress
|
||||
req.RequestURI = ""
|
||||
if errJson := HandleURLParameters(endpoint.Path, req, endpoint.RequestURLLiterals); errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
if errJson := HandleQueryParameters(req, endpoint.RequestQueryParams); errJson != nil {
|
||||
return errJson
|
||||
}
|
||||
// We have to add the prefix after handling parameters because adding the prefix changes URL segment indexing.
|
||||
req.URL.Path = "/internal" + req.URL.Path
|
||||
return nil
|
||||
}
|
||||
|
||||
// ProxyRequest proxies the request to grpc-gateway.
|
||||
func (m *ApiProxyMiddleware) ProxyRequest(req *http.Request) (*http.Response, ErrorJson) {
|
||||
// We do not use http.DefaultClient because it does not have any timeout.
|
||||
netClient := &http.Client{Timeout: m.Timeout}
|
||||
grpcResp, err := netClient.Do(req)
|
||||
if err != nil {
|
||||
if err, ok := err.(net.Error); ok && err.Timeout() {
|
||||
return nil, TimeoutError()
|
||||
}
|
||||
return nil, InternalServerErrorWithMessage(err, "could not proxy request")
|
||||
}
|
||||
if grpcResp == nil {
|
||||
return nil, &DefaultErrorJson{Message: "nil response from gRPC-gateway", Code: http.StatusInternalServerError}
|
||||
}
|
||||
return grpcResp, nil
|
||||
}
|
||||
|
||||
// ReadGrpcResponseBody reads the body from the grpc-gateway's response.
|
||||
func ReadGrpcResponseBody(r io.Reader) ([]byte, ErrorJson) {
|
||||
body, err := io.ReadAll(r)
|
||||
if err != nil {
|
||||
return nil, InternalServerErrorWithMessage(err, "could not read response body")
|
||||
}
|
||||
return body, nil
|
||||
}
|
||||
|
||||
// HandleGrpcResponseError acts on an error that resulted from a grpc-gateway's response.
|
||||
// Whether there was an error is indicated by the bool return value. In case of an error,
|
||||
// there is no need to write to the response because it's taken care of by the function.
|
||||
func HandleGrpcResponseError(errJson ErrorJson, resp *http.Response, respBody []byte, w http.ResponseWriter) (bool, ErrorJson) {
|
||||
responseHasError := false
|
||||
if err := json.Unmarshal(respBody, errJson); err != nil {
|
||||
return false, InternalServerErrorWithMessage(err, "could not unmarshal error")
|
||||
}
|
||||
if errJson.Msg() != "" {
|
||||
responseHasError = true
|
||||
// Something went wrong, but the request completed, meaning we can write headers and the error message.
|
||||
for h, vs := range resp.Header {
|
||||
for _, v := range vs {
|
||||
if strings.HasSuffix(h, api.VersionHeader) {
|
||||
w.Header().Set(api.VersionHeader, v)
|
||||
} else {
|
||||
w.Header().Set(h, v)
|
||||
}
|
||||
}
|
||||
}
|
||||
// Handle gRPC timeout.
|
||||
if resp.StatusCode == http.StatusGatewayTimeout {
|
||||
WriteError(w, TimeoutError(), resp.Header)
|
||||
} else {
|
||||
// Set code to HTTP code because unmarshalled body contained gRPC code.
|
||||
errJson.SetCode(resp.StatusCode)
|
||||
WriteError(w, errJson, resp.Header)
|
||||
}
|
||||
}
|
||||
return responseHasError, nil
|
||||
}
|
||||
|
||||
// GrpcResponseIsEmpty determines whether the grpc-gateway's response body contains no data.
|
||||
func GrpcResponseIsEmpty(grpcResponseBody []byte) bool {
|
||||
return len(grpcResponseBody) == 0 || string(grpcResponseBody) == "{}"
|
||||
}
|
||||
|
||||
// DeserializeGrpcResponseBodyIntoContainer deserializes the grpc-gateway's response body into an endpoint-specific struct.
|
||||
func DeserializeGrpcResponseBodyIntoContainer(body []byte, responseContainer interface{}) ErrorJson {
|
||||
if err := json.Unmarshal(body, &responseContainer); err != nil {
|
||||
return InternalServerErrorWithMessage(err, "could not unmarshal response")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ProcessMiddlewareResponseFields processes fields of an endpoint-specific container according to field tags.
|
||||
func ProcessMiddlewareResponseFields(responseContainer interface{}) ErrorJson {
|
||||
if err := processField(responseContainer, []fieldProcessor{
|
||||
{
|
||||
tag: "hex",
|
||||
f: base64ToHexProcessor,
|
||||
},
|
||||
{
|
||||
tag: "address",
|
||||
f: base64ToChecksumAddressProcessor,
|
||||
},
|
||||
{
|
||||
tag: "enum",
|
||||
f: enumToLowercaseProcessor,
|
||||
},
|
||||
{
|
||||
tag: "time",
|
||||
f: timeToUnixProcessor,
|
||||
},
|
||||
{
|
||||
tag: "uint256",
|
||||
f: base64ToUint256Processor,
|
||||
},
|
||||
}); err != nil {
|
||||
return InternalServerErrorWithMessage(err, "could not process response data")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// SerializeMiddlewareResponseIntoJson serializes the endpoint-specific response struct into a JSON representation.
|
||||
func SerializeMiddlewareResponseIntoJson(responseContainer interface{}) (jsonResponse []byte, errJson ErrorJson) {
|
||||
j, err := json.Marshal(responseContainer)
|
||||
if err != nil {
|
||||
return nil, InternalServerErrorWithMessage(err, "could not marshal response")
|
||||
}
|
||||
return j, nil
|
||||
}
|
||||
|
||||
// WriteMiddlewareResponseHeadersAndBody populates headers and the body of the final response.
|
||||
func WriteMiddlewareResponseHeadersAndBody(grpcResp *http.Response, responseJson []byte, w http.ResponseWriter) ErrorJson {
|
||||
var statusCodeHeader string
|
||||
for h, vs := range grpcResp.Header {
|
||||
// We don't want to expose any gRPC metadata in the HTTP response, so we skip forwarding metadata headers.
|
||||
if strings.HasPrefix(h, grpc.MetadataPrefix) {
|
||||
if h == grpc.WithPrefix(grpc.HttpCodeMetadataKey) {
|
||||
statusCodeHeader = vs[0]
|
||||
} else if strings.HasSuffix(h, api.VersionHeader) {
|
||||
w.Header().Set(api.VersionHeader, vs[0])
|
||||
}
|
||||
} else {
|
||||
for _, v := range vs {
|
||||
w.Header().Set(h, v)
|
||||
}
|
||||
}
|
||||
}
|
||||
if !GrpcResponseIsEmpty(responseJson) {
|
||||
w.Header().Set("Content-Length", strconv.Itoa(len(responseJson)))
|
||||
if statusCodeHeader != "" {
|
||||
code, err := strconv.Atoi(statusCodeHeader)
|
||||
if err != nil {
|
||||
return InternalServerErrorWithMessage(err, "could not parse status code")
|
||||
}
|
||||
w.WriteHeader(code)
|
||||
} else {
|
||||
w.WriteHeader(grpcResp.StatusCode)
|
||||
}
|
||||
if _, err := io.Copy(w, io.NopCloser(bytes.NewReader(responseJson))); err != nil {
|
||||
return InternalServerErrorWithMessage(err, "could not write response message")
|
||||
}
|
||||
} else {
|
||||
w.Header().Set("Content-Length", "0")
|
||||
w.WriteHeader(grpcResp.StatusCode)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// WriteError writes the error by manipulating headers and the body of the final response.
|
||||
func WriteError(w http.ResponseWriter, errJson ErrorJson, responseHeader http.Header) {
|
||||
// Include custom error in the error JSON.
|
||||
hasCustomError := false
|
||||
if responseHeader != nil {
|
||||
customError, ok := responseHeader[grpc.WithPrefix(grpc.CustomErrorMetadataKey)]
|
||||
if ok {
|
||||
hasCustomError = true
|
||||
// Assume header has only one value and read the 0 index.
|
||||
if err := json.Unmarshal([]byte(customError[0]), errJson); err != nil {
|
||||
log.WithError(err).Error("Could not unmarshal custom error message")
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var j []byte
|
||||
if hasCustomError {
|
||||
var err error
|
||||
j, err = json.Marshal(errJson)
|
||||
if err != nil {
|
||||
log.WithError(err).Error("Could not marshal error message")
|
||||
return
|
||||
}
|
||||
} else {
|
||||
var err error
|
||||
// We marshal the response body into a DefaultErrorJson if the custom error is not present.
|
||||
// This is because the ErrorJson argument is the endpoint's error definition, which may contain custom fields.
|
||||
// In such a scenario marshaling the endpoint's error would populate the resulting JSON
|
||||
// with these fields even if they are not present in the gRPC header.
|
||||
d := &DefaultErrorJson{
|
||||
Message: errJson.Msg(),
|
||||
Code: errJson.StatusCode(),
|
||||
}
|
||||
j, err = json.Marshal(d)
|
||||
if err != nil {
|
||||
log.WithError(err).Error("Could not marshal error message")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Length", strconv.Itoa(len(j)))
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(errJson.StatusCode())
|
||||
if _, err := io.Copy(w, io.NopCloser(bytes.NewReader(j))); err != nil {
|
||||
log.WithError(err).Error("Could not write error message")
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup performs final cleanup on the initial response from grpc-gateway.
|
||||
func Cleanup(grpcResponseBody io.ReadCloser) ErrorJson {
|
||||
if err := grpcResponseBody.Close(); err != nil {
|
||||
return InternalServerErrorWithMessage(err, "could not close response body")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -1,435 +0,0 @@
|
||||
package apimiddleware
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v4/api"
|
||||
"github.com/prysmaticlabs/prysm/v4/api/grpc"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
"github.com/sirupsen/logrus/hooks/test"
|
||||
)
|
||||
|
||||
type testRequestContainer struct {
|
||||
TestString string
|
||||
TestHexString string `hex:"true"`
|
||||
TestEmptyHexString string `hex:"true"`
|
||||
TestUint256String string `uint256:"true"`
|
||||
}
|
||||
|
||||
func defaultRequestContainer() *testRequestContainer {
|
||||
return &testRequestContainer{
|
||||
TestString: "test string",
|
||||
TestHexString: "0x666F6F", // hex encoding of "foo"
|
||||
TestEmptyHexString: "0x",
|
||||
TestUint256String: "4196",
|
||||
}
|
||||
}
|
||||
|
||||
type testResponseContainer struct {
|
||||
TestString string
|
||||
TestHex string `hex:"true"`
|
||||
TestEmptyHex string `hex:"true"`
|
||||
TestAddress string `address:"true"`
|
||||
TestEmptyAddress string `address:"true"`
|
||||
TestUint256 string `uint256:"true"`
|
||||
TestEnum string `enum:"true"`
|
||||
TestTime string `time:"true"`
|
||||
}
|
||||
|
||||
func defaultResponseContainer() *testResponseContainer {
|
||||
return &testResponseContainer{
|
||||
TestString: "test string",
|
||||
TestHex: "Zm9v", // base64 encoding of "foo"
|
||||
TestEmptyHex: "",
|
||||
TestAddress: "Zm9v",
|
||||
TestEmptyAddress: "",
|
||||
TestEnum: "Test Enum",
|
||||
TestTime: "2006-01-02T15:04:05Z",
|
||||
|
||||
// base64 encoding of 4196 in little-endian
|
||||
TestUint256: "ZBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=",
|
||||
}
|
||||
}
|
||||
|
||||
type testErrorJson struct {
|
||||
Message string
|
||||
Code int
|
||||
CustomField string
|
||||
}
|
||||
|
||||
// StatusCode returns the error's underlying error code.
|
||||
func (e *testErrorJson) StatusCode() int {
|
||||
return e.Code
|
||||
}
|
||||
|
||||
// Msg returns the error's underlying message.
|
||||
func (e *testErrorJson) Msg() string {
|
||||
return e.Message
|
||||
}
|
||||
|
||||
// SetCode sets the error's underlying error code.
|
||||
func (e *testErrorJson) SetCode(code int) {
|
||||
e.Code = code
|
||||
}
|
||||
|
||||
// SetMsg sets the error's underlying message.
|
||||
func (e *testErrorJson) SetMsg(msg string) {
|
||||
e.Message = msg
|
||||
}
|
||||
|
||||
func TestDeserializeRequestBodyIntoContainer(t *testing.T) {
|
||||
t.Run("ok", func(t *testing.T) {
|
||||
var bodyJson bytes.Buffer
|
||||
err := json.NewEncoder(&bodyJson).Encode(defaultRequestContainer())
|
||||
require.NoError(t, err)
|
||||
|
||||
container := &testRequestContainer{}
|
||||
errJson := DeserializeRequestBodyIntoContainer(&bodyJson, container)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, "test string", container.TestString)
|
||||
})
|
||||
|
||||
t.Run("error", func(t *testing.T) {
|
||||
var bodyJson bytes.Buffer
|
||||
bodyJson.Write([]byte("foo"))
|
||||
errJson := DeserializeRequestBodyIntoContainer(&bodyJson, &testRequestContainer{})
|
||||
require.NotNil(t, errJson)
|
||||
assert.Equal(t, true, strings.Contains(errJson.Msg(), "could not decode request body"))
|
||||
assert.Equal(t, http.StatusInternalServerError, errJson.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("unknown field", func(t *testing.T) {
|
||||
var bodyJson bytes.Buffer
|
||||
bodyJson.Write([]byte("{\"foo\":\"foo\"}"))
|
||||
errJson := DeserializeRequestBodyIntoContainer(&bodyJson, &testRequestContainer{})
|
||||
require.NotNil(t, errJson)
|
||||
assert.Equal(t, true, strings.Contains(errJson.Msg(), "could not decode request body"))
|
||||
assert.Equal(t, http.StatusBadRequest, errJson.StatusCode())
|
||||
})
|
||||
}
|
||||
|
||||
func TestProcessRequestContainerFields(t *testing.T) {
|
||||
t.Run("ok", func(t *testing.T) {
|
||||
container := defaultRequestContainer()
|
||||
|
||||
errJson := ProcessRequestContainerFields(container)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, "Zm9v", container.TestHexString)
|
||||
assert.Equal(t, "", container.TestEmptyHexString)
|
||||
assert.Equal(t, "ZBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", container.TestUint256String)
|
||||
})
|
||||
|
||||
t.Run("error", func(t *testing.T) {
|
||||
errJson := ProcessRequestContainerFields("foo")
|
||||
require.NotNil(t, errJson)
|
||||
assert.Equal(t, true, strings.Contains(errJson.Msg(), "could not process request data"))
|
||||
assert.Equal(t, http.StatusInternalServerError, errJson.StatusCode())
|
||||
})
|
||||
}
|
||||
|
||||
func TestSetRequestBodyToRequestContainer(t *testing.T) {
|
||||
var body bytes.Buffer
|
||||
request := httptest.NewRequest("GET", "http://foo.example", &body)
|
||||
|
||||
errJson := SetRequestBodyToRequestContainer(defaultRequestContainer(), request)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
container := &testRequestContainer{}
|
||||
require.NoError(t, json.NewDecoder(request.Body).Decode(container))
|
||||
assert.Equal(t, "test string", container.TestString)
|
||||
contentLengthHeader, ok := request.Header["Content-Length"]
|
||||
require.Equal(t, true, ok)
|
||||
require.Equal(t, 1, len(contentLengthHeader), "wrong number of header values")
|
||||
assert.Equal(t, "108", contentLengthHeader[0])
|
||||
assert.Equal(t, int64(108), request.ContentLength)
|
||||
}
|
||||
|
||||
func TestPrepareRequestForProxying(t *testing.T) {
|
||||
middleware := &ApiProxyMiddleware{
|
||||
GatewayAddress: "http://gateway.example",
|
||||
}
|
||||
// We will set some params to make the request more interesting.
|
||||
endpoint := Endpoint{
|
||||
Path: "/{url_param}",
|
||||
RequestURLLiterals: []string{"url_param"},
|
||||
RequestQueryParams: []QueryParam{{Name: "query_param"}},
|
||||
}
|
||||
var body bytes.Buffer
|
||||
request := httptest.NewRequest("GET", "http://foo.example?query_param=bar", &body)
|
||||
|
||||
errJson := middleware.PrepareRequestForProxying(endpoint, request)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, "http", request.URL.Scheme)
|
||||
assert.Equal(t, middleware.GatewayAddress, request.URL.Host)
|
||||
assert.Equal(t, "", request.RequestURI)
|
||||
}
|
||||
|
||||
func TestReadGrpcResponseBody(t *testing.T) {
|
||||
var b bytes.Buffer
|
||||
b.Write([]byte("foo"))
|
||||
|
||||
body, jsonErr := ReadGrpcResponseBody(&b)
|
||||
require.Equal(t, true, jsonErr == nil)
|
||||
assert.Equal(t, "foo", string(body))
|
||||
}
|
||||
|
||||
func TestHandleGrpcResponseError(t *testing.T) {
|
||||
response := &http.Response{
|
||||
StatusCode: 400,
|
||||
Header: http.Header{
|
||||
"Foo": []string{"foo"},
|
||||
"Bar": []string{"bar"},
|
||||
},
|
||||
}
|
||||
writer := httptest.NewRecorder()
|
||||
errJson := &testErrorJson{
|
||||
Message: "foo",
|
||||
Code: 400,
|
||||
}
|
||||
b, err := json.Marshal(errJson)
|
||||
require.NoError(t, err)
|
||||
|
||||
hasError, e := HandleGrpcResponseError(errJson, response, b, writer)
|
||||
require.Equal(t, true, e == nil)
|
||||
assert.Equal(t, true, hasError)
|
||||
v, ok := writer.Header()["Foo"]
|
||||
require.Equal(t, true, ok, "header not found")
|
||||
require.Equal(t, 1, len(v), "wrong number of header values")
|
||||
assert.Equal(t, "foo", v[0])
|
||||
v, ok = writer.Header()["Bar"]
|
||||
require.Equal(t, true, ok, "header not found")
|
||||
require.Equal(t, 1, len(v), "wrong number of header values")
|
||||
assert.Equal(t, "bar", v[0])
|
||||
assert.Equal(t, 400, errJson.StatusCode())
|
||||
}
|
||||
|
||||
func TestGrpcResponseIsEmpty(t *testing.T) {
|
||||
t.Run("nil", func(t *testing.T) {
|
||||
assert.Equal(t, true, GrpcResponseIsEmpty(nil))
|
||||
})
|
||||
t.Run("empty_slice", func(t *testing.T) {
|
||||
assert.Equal(t, true, GrpcResponseIsEmpty(make([]byte, 0)))
|
||||
})
|
||||
t.Run("empty_brackets", func(t *testing.T) {
|
||||
assert.Equal(t, true, GrpcResponseIsEmpty([]byte("{}")))
|
||||
})
|
||||
t.Run("non_empty", func(t *testing.T) {
|
||||
assert.Equal(t, false, GrpcResponseIsEmpty([]byte("{\"foo\":\"bar\"})")))
|
||||
})
|
||||
}
|
||||
|
||||
func TestDeserializeGrpcResponseBodyIntoContainer(t *testing.T) {
|
||||
t.Run("ok", func(t *testing.T) {
|
||||
body, err := json.Marshal(defaultRequestContainer())
|
||||
require.NoError(t, err)
|
||||
|
||||
container := &testRequestContainer{}
|
||||
errJson := DeserializeGrpcResponseBodyIntoContainer(body, container)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, "test string", container.TestString)
|
||||
})
|
||||
|
||||
t.Run("error", func(t *testing.T) {
|
||||
var bodyJson bytes.Buffer
|
||||
bodyJson.Write([]byte("foo"))
|
||||
errJson := DeserializeGrpcResponseBodyIntoContainer(bodyJson.Bytes(), &testRequestContainer{})
|
||||
require.NotNil(t, errJson)
|
||||
assert.Equal(t, true, strings.Contains(errJson.Msg(), "could not unmarshal response"))
|
||||
assert.Equal(t, http.StatusInternalServerError, errJson.StatusCode())
|
||||
})
|
||||
}
|
||||
|
||||
func TestProcessMiddlewareResponseFields(t *testing.T) {
|
||||
t.Run("Ok", func(t *testing.T) {
|
||||
container := defaultResponseContainer()
|
||||
|
||||
errJson := ProcessMiddlewareResponseFields(container)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, "0x666f6f", container.TestHex)
|
||||
assert.Equal(t, "0x", container.TestEmptyHex)
|
||||
assert.Equal(t, "0x0000000000000000000000000000000000666F6f", container.TestAddress)
|
||||
assert.Equal(t, "0x", container.TestEmptyAddress)
|
||||
assert.Equal(t, "4196", container.TestUint256)
|
||||
assert.Equal(t, "test enum", container.TestEnum)
|
||||
assert.Equal(t, "1136214245", container.TestTime)
|
||||
})
|
||||
|
||||
t.Run("error", func(t *testing.T) {
|
||||
errJson := ProcessMiddlewareResponseFields("foo")
|
||||
require.NotNil(t, errJson)
|
||||
assert.Equal(t, true, strings.Contains(errJson.Msg(), "could not process response data"))
|
||||
assert.Equal(t, http.StatusInternalServerError, errJson.StatusCode())
|
||||
})
|
||||
}
|
||||
|
||||
func TestSerializeMiddlewareResponseIntoJson(t *testing.T) {
|
||||
container := defaultResponseContainer()
|
||||
j, errJson := SerializeMiddlewareResponseIntoJson(container)
|
||||
assert.Equal(t, true, errJson == nil)
|
||||
cToDeserialize := &testResponseContainer{}
|
||||
require.NoError(t, json.Unmarshal(j, cToDeserialize))
|
||||
assert.Equal(t, "test string", cToDeserialize.TestString)
|
||||
}
|
||||
|
||||
func TestWriteMiddlewareResponseHeadersAndBody(t *testing.T) {
|
||||
t.Run("GET", func(t *testing.T) {
|
||||
response := &http.Response{
|
||||
Header: http.Header{
|
||||
"Foo": []string{"foo"},
|
||||
grpc.WithPrefix(grpc.HttpCodeMetadataKey): []string{"204"},
|
||||
grpc.WithPrefix(api.VersionHeader): []string{"capella"},
|
||||
},
|
||||
}
|
||||
container := defaultResponseContainer()
|
||||
responseJson, err := json.Marshal(container)
|
||||
require.NoError(t, err)
|
||||
writer := httptest.NewRecorder()
|
||||
writer.Body = &bytes.Buffer{}
|
||||
|
||||
errJson := WriteMiddlewareResponseHeadersAndBody(response, responseJson, writer)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
v, ok := writer.Header()["Foo"]
|
||||
require.Equal(t, true, ok, "header not found")
|
||||
require.Equal(t, 1, len(v), "wrong number of header values")
|
||||
assert.Equal(t, "foo", v[0])
|
||||
v, ok = writer.Header()["Content-Length"]
|
||||
require.Equal(t, true, ok, "header not found")
|
||||
require.Equal(t, 1, len(v), "wrong number of header values")
|
||||
assert.Equal(t, "224", v[0])
|
||||
v, ok = writer.Header()["Eth-Consensus-Version"]
|
||||
require.Equal(t, true, ok, "header not found")
|
||||
assert.Equal(t, "capella", v[0])
|
||||
assert.Equal(t, 204, writer.Code)
|
||||
assert.DeepEqual(t, responseJson, writer.Body.Bytes())
|
||||
})
|
||||
|
||||
t.Run("GET_no_grpc_status_code_header", func(t *testing.T) {
|
||||
response := &http.Response{
|
||||
Header: http.Header{},
|
||||
StatusCode: 204,
|
||||
}
|
||||
container := defaultResponseContainer()
|
||||
responseJson, err := json.Marshal(container)
|
||||
require.NoError(t, err)
|
||||
writer := httptest.NewRecorder()
|
||||
|
||||
errJson := WriteMiddlewareResponseHeadersAndBody(response, responseJson, writer)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, 204, writer.Code)
|
||||
})
|
||||
|
||||
t.Run("GET_invalid_status_code", func(t *testing.T) {
|
||||
response := &http.Response{
|
||||
Header: http.Header{"Grpc-Metadata-Eth-Consensus-Version": []string{"capella"}},
|
||||
}
|
||||
|
||||
// Set invalid status code.
|
||||
response.Header[grpc.WithPrefix(grpc.HttpCodeMetadataKey)] = []string{"invalid"}
|
||||
response.Header[grpc.WithPrefix(api.VersionHeader)] = []string{"capella"}
|
||||
|
||||
container := defaultResponseContainer()
|
||||
responseJson, err := json.Marshal(container)
|
||||
require.NoError(t, err)
|
||||
writer := httptest.NewRecorder()
|
||||
|
||||
errJson := WriteMiddlewareResponseHeadersAndBody(response, responseJson, writer)
|
||||
require.Equal(t, false, errJson == nil)
|
||||
assert.Equal(t, true, strings.Contains(errJson.Msg(), "could not parse status code"))
|
||||
assert.Equal(t, http.StatusInternalServerError, errJson.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("POST", func(t *testing.T) {
|
||||
response := &http.Response{
|
||||
Header: http.Header{},
|
||||
StatusCode: 204,
|
||||
}
|
||||
container := defaultResponseContainer()
|
||||
responseJson, err := json.Marshal(container)
|
||||
require.NoError(t, err)
|
||||
writer := httptest.NewRecorder()
|
||||
|
||||
errJson := WriteMiddlewareResponseHeadersAndBody(response, responseJson, writer)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, 204, writer.Code)
|
||||
})
|
||||
|
||||
t.Run("POST_with_response_body", func(t *testing.T) {
|
||||
response := &http.Response{
|
||||
Header: http.Header{},
|
||||
StatusCode: 204,
|
||||
}
|
||||
container := defaultResponseContainer()
|
||||
responseJson, err := json.Marshal(container)
|
||||
require.NoError(t, err)
|
||||
writer := httptest.NewRecorder()
|
||||
writer.Body = &bytes.Buffer{}
|
||||
|
||||
errJson := WriteMiddlewareResponseHeadersAndBody(response, responseJson, writer)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, 204, writer.Code)
|
||||
assert.DeepEqual(t, responseJson, writer.Body.Bytes())
|
||||
})
|
||||
|
||||
t.Run("POST_with_empty_json_body", func(t *testing.T) {
|
||||
response := &http.Response{
|
||||
Header: http.Header{},
|
||||
StatusCode: 204,
|
||||
}
|
||||
responseJson, err := json.Marshal(struct{}{})
|
||||
require.NoError(t, err)
|
||||
writer := httptest.NewRecorder()
|
||||
writer.Body = &bytes.Buffer{}
|
||||
|
||||
errJson := WriteMiddlewareResponseHeadersAndBody(response, responseJson, writer)
|
||||
require.Equal(t, true, errJson == nil)
|
||||
assert.Equal(t, 204, writer.Code)
|
||||
assert.DeepEqual(t, []byte(nil), writer.Body.Bytes())
|
||||
assert.Equal(t, "0", writer.Header()["Content-Length"][0])
|
||||
})
|
||||
}
|
||||
|
||||
func TestWriteError(t *testing.T) {
|
||||
t.Run("ok", func(t *testing.T) {
|
||||
responseHeader := http.Header{
|
||||
grpc.WithPrefix(grpc.CustomErrorMetadataKey): []string{"{\"CustomField\":\"bar\"}"},
|
||||
}
|
||||
errJson := &testErrorJson{
|
||||
Message: "foo",
|
||||
Code: 500,
|
||||
}
|
||||
writer := httptest.NewRecorder()
|
||||
writer.Body = &bytes.Buffer{}
|
||||
|
||||
WriteError(writer, errJson, responseHeader)
|
||||
v, ok := writer.Header()["Content-Length"]
|
||||
require.Equal(t, true, ok, "header not found")
|
||||
require.Equal(t, 1, len(v), "wrong number of header values")
|
||||
assert.Equal(t, "48", v[0])
|
||||
v, ok = writer.Header()["Content-Type"]
|
||||
require.Equal(t, true, ok, "header not found")
|
||||
require.Equal(t, 1, len(v), "wrong number of header values")
|
||||
assert.Equal(t, "application/json", v[0])
|
||||
assert.Equal(t, 500, writer.Code)
|
||||
eDeserialize := &testErrorJson{}
|
||||
require.NoError(t, json.Unmarshal(writer.Body.Bytes(), eDeserialize))
|
||||
assert.Equal(t, "foo", eDeserialize.Message)
|
||||
assert.Equal(t, 500, eDeserialize.Code)
|
||||
assert.Equal(t, "bar", eDeserialize.CustomField)
|
||||
})
|
||||
|
||||
t.Run("invalid_custom_error_header", func(t *testing.T) {
|
||||
logHook := test.NewGlobal()
|
||||
|
||||
responseHeader := http.Header{
|
||||
grpc.WithPrefix(grpc.CustomErrorMetadataKey): []string{"invalid"},
|
||||
}
|
||||
|
||||
WriteError(httptest.NewRecorder(), &testErrorJson{}, responseHeader)
|
||||
assert.LogsContain(t, logHook, "Could not unmarshal custom error message")
|
||||
})
|
||||
}
|
||||
@@ -1,69 +0,0 @@
|
||||
package apimiddleware
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// ---------------
|
||||
// Error handling.
|
||||
// ---------------
|
||||
|
||||
// ErrorJson describes common functionality of all JSON error representations.
|
||||
type ErrorJson interface {
|
||||
StatusCode() int
|
||||
SetCode(code int)
|
||||
Msg() string
|
||||
SetMsg(msg string)
|
||||
}
|
||||
|
||||
// DefaultErrorJson is a JSON representation of a simple error value, containing only a message and an error code.
|
||||
type DefaultErrorJson struct {
|
||||
Message string `json:"message"`
|
||||
Code int `json:"code"`
|
||||
}
|
||||
|
||||
// InternalServerErrorWithMessage returns a DefaultErrorJson with 500 code and a custom message.
|
||||
func InternalServerErrorWithMessage(err error, message string) *DefaultErrorJson {
|
||||
e := errors.Wrapf(err, message)
|
||||
return &DefaultErrorJson{
|
||||
Message: e.Error(),
|
||||
Code: http.StatusInternalServerError,
|
||||
}
|
||||
}
|
||||
|
||||
// InternalServerError returns a DefaultErrorJson with 500 code.
|
||||
func InternalServerError(err error) *DefaultErrorJson {
|
||||
return &DefaultErrorJson{
|
||||
Message: err.Error(),
|
||||
Code: http.StatusInternalServerError,
|
||||
}
|
||||
}
|
||||
|
||||
func TimeoutError() *DefaultErrorJson {
|
||||
return &DefaultErrorJson{
|
||||
Message: "Request timeout",
|
||||
Code: http.StatusRequestTimeout,
|
||||
}
|
||||
}
|
||||
|
||||
// StatusCode returns the error's underlying error code.
|
||||
func (e *DefaultErrorJson) StatusCode() int {
|
||||
return e.Code
|
||||
}
|
||||
|
||||
// Msg returns the error's underlying message.
|
||||
func (e *DefaultErrorJson) Msg() string {
|
||||
return e.Message
|
||||
}
|
||||
|
||||
// SetCode sets the error's underlying error code.
|
||||
func (e *DefaultErrorJson) SetCode(code int) {
|
||||
e.Code = code
|
||||
}
|
||||
|
||||
// SetMsg sets the error's underlying message.
|
||||
func (e *DefaultErrorJson) SetMsg(msg string) {
|
||||
e.Message = msg
|
||||
}
|
||||
@@ -6,19 +6,17 @@ import (
|
||||
"fmt"
|
||||
"net"
|
||||
"net/http"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
gwruntime "github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v4/api/gateway/apimiddleware"
|
||||
"github.com/prysmaticlabs/prysm/v4/runtime"
|
||||
"github.com/rs/cors"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/server"
|
||||
"github.com/prysmaticlabs/prysm/v5/runtime"
|
||||
"google.golang.org/grpc"
|
||||
"google.golang.org/grpc/connectivity"
|
||||
"google.golang.org/grpc/credentials"
|
||||
"google.golang.org/grpc/credentials/insecure"
|
||||
)
|
||||
|
||||
var _ runtime.Service = (*Gateway)(nil)
|
||||
@@ -35,7 +33,6 @@ type PbHandlerRegistration func(context.Context, *gwruntime.ServeMux, *grpc.Clie
|
||||
|
||||
// MuxHandler is a function that implements the mux handler functionality.
|
||||
type MuxHandler func(
|
||||
apiMiddlewareHandler *apimiddleware.ApiProxyMiddleware,
|
||||
h http.HandlerFunc,
|
||||
w http.ResponseWriter,
|
||||
req *http.Request,
|
||||
@@ -43,16 +40,15 @@ type MuxHandler func(
|
||||
|
||||
// Config parameters for setting up the gateway service.
|
||||
type config struct {
|
||||
maxCallRecvMsgSize uint64
|
||||
remoteCert string
|
||||
gatewayAddr string
|
||||
remoteAddr string
|
||||
allowedOrigins []string
|
||||
apiMiddlewareEndpointFactory apimiddleware.EndpointFactory
|
||||
muxHandler MuxHandler
|
||||
pbHandlers []*PbMux
|
||||
router *mux.Router
|
||||
timeout time.Duration
|
||||
maxCallRecvMsgSize uint64
|
||||
remoteCert string
|
||||
gatewayAddr string
|
||||
remoteAddr string
|
||||
allowedOrigins []string
|
||||
muxHandler MuxHandler
|
||||
pbHandlers []*PbMux
|
||||
router *mux.Router
|
||||
timeout time.Duration
|
||||
}
|
||||
|
||||
// Gateway is the gRPC gateway to serve HTTP JSON traffic as a proxy and forward it to the gRPC server.
|
||||
@@ -61,7 +57,6 @@ type Gateway struct {
|
||||
conn *grpc.ClientConn
|
||||
server *http.Server
|
||||
cancel context.CancelFunc
|
||||
proxy *apimiddleware.ApiProxyMiddleware
|
||||
ctx context.Context
|
||||
startFailure error
|
||||
}
|
||||
@@ -109,15 +104,11 @@ func (g *Gateway) Start() {
|
||||
}
|
||||
}
|
||||
|
||||
corsMux := g.corsMiddleware(g.cfg.router)
|
||||
|
||||
if g.cfg.apiMiddlewareEndpointFactory != nil && !g.cfg.apiMiddlewareEndpointFactory.IsNil() {
|
||||
g.registerApiMiddleware()
|
||||
}
|
||||
corsMux := server.CorsHandler(g.cfg.allowedOrigins).Middleware(g.cfg.router)
|
||||
|
||||
if g.cfg.muxHandler != nil {
|
||||
g.cfg.router.PathPrefix("/").HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
g.cfg.muxHandler(g.proxy, corsMux.ServeHTTP, w, r)
|
||||
g.cfg.muxHandler(corsMux.ServeHTTP, w, r)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -167,35 +158,6 @@ func (g *Gateway) Stop() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Gateway) corsMiddleware(h http.Handler) http.Handler {
|
||||
c := cors.New(cors.Options{
|
||||
AllowedOrigins: g.cfg.allowedOrigins,
|
||||
AllowedMethods: []string{http.MethodPost, http.MethodGet, http.MethodDelete, http.MethodOptions},
|
||||
AllowCredentials: true,
|
||||
MaxAge: 600,
|
||||
AllowedHeaders: []string{"*"},
|
||||
})
|
||||
return c.Handler(h)
|
||||
}
|
||||
|
||||
const swaggerDir = "proto/prysm/v1alpha1/"
|
||||
|
||||
// SwaggerServer returns swagger specification files located under "/swagger/"
|
||||
func SwaggerServer() http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
if !strings.HasSuffix(r.URL.Path, ".swagger.json") {
|
||||
log.Debugf("Not found: %s", r.URL.Path)
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
log.Debugf("Serving %s\n", r.URL.Path)
|
||||
p := strings.TrimPrefix(r.URL.Path, "/swagger/")
|
||||
p = path.Join(swaggerDir, p)
|
||||
http.ServeFile(w, r, p)
|
||||
}
|
||||
}
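A minimal sketch, not part of this diff, of how SwaggerServer might be mounted on the gateway router; the import path, port, and route prefix are assumptions (the prefix simply mirrors the "/swagger/" string the handler itself trims):
package main

import (
    "net/http"

    "github.com/gorilla/mux"
    "github.com/prysmaticlabs/prysm/v5/api/gateway" // assumed import path for this package
)

func main() {
    r := mux.NewRouter()
    // SwaggerServer only serves paths ending in ".swagger.json"; it strips the
    // "/swagger/" prefix and reads files from proto/prysm/v1alpha1/.
    r.PathPrefix("/swagger/").HandlerFunc(gateway.SwaggerServer())
    _ = http.ListenAndServe(":8080", r)
}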
|
||||
|
||||
// dial the gRPC server.
|
||||
func (g *Gateway) dial(ctx context.Context, network, addr string) (*grpc.ClientConn, error) {
|
||||
switch network {
|
||||
@@ -211,19 +173,21 @@ func (g *Gateway) dial(ctx context.Context, network, addr string) (*grpc.ClientC
|
||||
// dialTCP creates a client connection via TCP.
|
||||
// "addr" must be a valid TCP address with a port number.
|
||||
func (g *Gateway) dialTCP(ctx context.Context, addr string) (*grpc.ClientConn, error) {
|
||||
security := grpc.WithInsecure()
|
||||
var security grpc.DialOption
|
||||
if len(g.cfg.remoteCert) > 0 {
|
||||
creds, err := credentials.NewClientTLSFromFile(g.cfg.remoteCert, "")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
security = grpc.WithTransportCredentials(creds)
|
||||
} else {
|
||||
// Use insecure credentials when there's no remote cert provided.
|
||||
security = grpc.WithTransportCredentials(insecure.NewCredentials())
|
||||
}
|
||||
opts := []grpc.DialOption{
|
||||
security,
|
||||
grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(int(g.cfg.maxCallRecvMsgSize))),
|
||||
}
|
||||
|
||||
return grpc.DialContext(ctx, addr, opts...)
|
||||
}
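The hunk above swaps the deprecated grpc.WithInsecure() for explicit insecure transport credentials. A self-contained sketch of the same dial pattern, with a placeholder address and an illustrative message-size cap:
package main

import (
    "context"

    "google.golang.org/grpc"
    "google.golang.org/grpc/credentials/insecure"
)

func main() {
    ctx := context.Background()
    // Placeholder address; in the gateway this comes from cfg.remoteAddr.
    conn, err := grpc.DialContext(ctx, "127.0.0.1:4000",
        grpc.WithTransportCredentials(insecure.NewCredentials()),
        grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(1<<22)), // 4 MiB, illustrative only
    )
    if err != nil {
        panic(err)
    }
    defer conn.Close()
}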
|
||||
|
||||
@@ -240,19 +204,9 @@ func (g *Gateway) dialUnix(ctx context.Context, addr string) (*grpc.ClientConn,
|
||||
return d(addr, 0)
|
||||
}
|
||||
opts := []grpc.DialOption{
|
||||
grpc.WithInsecure(),
|
||||
grpc.WithTransportCredentials(insecure.NewCredentials()),
|
||||
grpc.WithContextDialer(f),
|
||||
grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(int(g.cfg.maxCallRecvMsgSize))),
|
||||
}
|
||||
return grpc.DialContext(ctx, addr, opts...)
|
||||
}
|
||||
|
||||
func (g *Gateway) registerApiMiddleware() {
|
||||
g.proxy = &apimiddleware.ApiProxyMiddleware{
|
||||
GatewayAddress: g.cfg.gatewayAddr,
|
||||
EndpointCreator: g.cfg.apiMiddlewareEndpointFactory,
|
||||
Timeout: g.cfg.timeout,
|
||||
}
|
||||
log.Info("Starting API middleware")
|
||||
g.proxy.Run(g.cfg.router)
|
||||
}
|
||||
|
||||
@@ -10,44 +10,25 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/prysmaticlabs/prysm/v4/api/gateway/apimiddleware"
|
||||
"github.com/prysmaticlabs/prysm/v4/cmd/beacon-chain/flags"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/cmd/beacon-chain/flags"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
logTest "github.com/sirupsen/logrus/hooks/test"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
type mockEndpointFactory struct {
|
||||
}
|
||||
|
||||
func (*mockEndpointFactory) Paths() []string {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
func (*mockEndpointFactory) Create(_ string) (*apimiddleware.Endpoint, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (*mockEndpointFactory) IsNil() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func TestGateway_Customized(t *testing.T) {
|
||||
r := mux.NewRouter()
|
||||
cert := "cert"
|
||||
origins := []string{"origin"}
|
||||
size := uint64(100)
|
||||
endpointFactory := &mockEndpointFactory{}
|
||||
|
||||
opts := []Option{
|
||||
WithRouter(r),
|
||||
WithRemoteCert(cert),
|
||||
WithAllowedOrigins(origins),
|
||||
WithMaxCallRecvMsgSize(size),
|
||||
WithApiMiddleware(endpointFactory),
|
||||
WithMuxHandler(func(
|
||||
_ *apimiddleware.ApiProxyMiddleware,
|
||||
_ http.HandlerFunc,
|
||||
_ http.ResponseWriter,
|
||||
_ *http.Request,
|
||||
@@ -63,7 +44,6 @@ func TestGateway_Customized(t *testing.T) {
|
||||
require.Equal(t, 1, len(g.cfg.allowedOrigins))
|
||||
assert.Equal(t, origins[0], g.cfg.allowedOrigins[0])
|
||||
assert.Equal(t, size, g.cfg.maxCallRecvMsgSize)
|
||||
assert.Equal(t, endpointFactory, g.cfg.apiMiddlewareEndpointFactory)
|
||||
}
|
||||
|
||||
func TestGateway_StartStop(t *testing.T) {
|
||||
@@ -83,7 +63,6 @@ func TestGateway_StartStop(t *testing.T) {
|
||||
WithGatewayAddr(gatewayAddress),
|
||||
WithRemoteAddr(selfAddress),
|
||||
WithMuxHandler(func(
|
||||
_ *apimiddleware.ApiProxyMiddleware,
|
||||
_ http.HandlerFunc,
|
||||
_ http.ResponseWriter,
|
||||
_ *http.Request,
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
gwruntime "github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
|
||||
"github.com/prysmaticlabs/prysm/v4/api/gateway/apimiddleware"
|
||||
)
|
||||
|
||||
type Option func(g *Gateway) error
|
||||
@@ -70,14 +69,6 @@ func WithMaxCallRecvMsgSize(size uint64) Option {
|
||||
}
|
||||
}
|
||||
|
||||
// WithApiMiddleware allows adding an API middleware proxy to the gateway.
|
||||
func WithApiMiddleware(endpointFactory apimiddleware.EndpointFactory) Option {
|
||||
return func(g *Gateway) error {
|
||||
g.cfg.apiMiddlewareEndpointFactory = endpointFactory
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// WithTimeout allows changing the timeout value for API calls.
|
||||
func WithTimeout(seconds uint64) Option {
|
||||
return func(g *Gateway) error {
|
||||
|
||||
@@ -6,7 +6,7 @@ go_library(
|
||||
"grpcutils.go",
|
||||
"parameters.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v4/api/grpc",
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/grpc",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"@com_github_sirupsen_logrus//:go_default_library",
|
||||
|
||||
@@ -7,8 +7,8 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
logTest "github.com/sirupsen/logrus/hooks/test"
|
||||
"google.golang.org/grpc"
|
||||
"google.golang.org/grpc/metadata"
|
||||
|
||||
@@ -7,4 +7,6 @@ const (
|
||||
ConsensusBlockValueHeader = "Eth-Consensus-Block-Value"
|
||||
JsonMediaType = "application/json"
|
||||
OctetStreamMediaType = "application/octet-stream"
|
||||
EventStreamMediaType = "text/event-stream"
|
||||
KeepAlive = "keep-alive"
|
||||
)
|
||||
|
||||
@@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test")
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = ["pagination.go"],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v4/api/pagination",
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/pagination",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//config/params:go_default_library",
|
||||
|
||||
@@ -6,7 +6,7 @@ import (
|
||||
"strconv"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prysmaticlabs/prysm/v4/config/params"
|
||||
"github.com/prysmaticlabs/prysm/v5/config/params"
|
||||
)
|
||||
|
||||
// StartAndEndPage takes in the requested page token, wanted page size, total page size.
|
||||
|
||||
@@ -3,9 +3,9 @@ package pagination_test
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v4/api/pagination"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/api/pagination"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/assert"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
)
|
||||
|
||||
func TestStartAndEndPage(t *testing.T) {
|
||||
|
||||
@@ -3,16 +3,22 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test")
go_library(
    name = "go_default_library",
    srcs = [
        "error.go",
        "middleware.go",
        "util.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/v4/api/server",
    importpath = "github.com/prysmaticlabs/prysm/v5/api/server",
    visibility = ["//visibility:public"],
    deps = [
        "@com_github_gorilla_mux//:go_default_library",
        "@com_github_rs_cors//:go_default_library",
    ],
)

go_test(
    name = "go_default_test",
    srcs = [
        "error_test.go",
        "middleware_test.go",
        "util_test.go",
    ],
45
api/server/error.go
Normal file
@@ -0,0 +1,45 @@
package server

import (
    "fmt"
    "strings"
)

// DecodeError represents an error resulting from trying to decode an HTTP request.
// It tracks the full field name for which decoding failed.
type DecodeError struct {
    path []string
    err  error
}

// NewDecodeError wraps an error (either the initial decoding error or another DecodeError).
// The current field that failed decoding must be passed in.
func NewDecodeError(err error, field string) *DecodeError {
    de, ok := err.(*DecodeError)
    if ok {
        return &DecodeError{path: append([]string{field}, de.path...), err: de.err}
    }
    return &DecodeError{path: []string{field}, err: err}
}

// Error returns the formatted error message which contains the full field name and the actual decoding error.
func (e *DecodeError) Error() string {
    return fmt.Sprintf("could not decode %s: %s", strings.Join(e.path, "."), e.err.Error())
}

// IndexedVerificationFailureError wraps a collection of verification failures.
type IndexedVerificationFailureError struct {
    Message  string                        `json:"message"`
    Code     int                           `json:"code"`
    Failures []*IndexedVerificationFailure `json:"failures"`
}

func (e *IndexedVerificationFailureError) StatusCode() int {
    return e.Code
}

// IndexedVerificationFailure represents an issue when verifying a single indexed object e.g. an item in an array.
type IndexedVerificationFailure struct {
    Index   int    `json:"index"`
    Message string `json:"message"`
}
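A small illustrative example, not part of the diff, of the JSON this error type produces; the message text and code are placeholders:
package server_test

import (
    "encoding/json"
    "fmt"

    "github.com/prysmaticlabs/prysm/v5/api/server"
)

func ExampleIndexedVerificationFailureError() {
    // Field values here are illustrative, not taken from the source diff.
    failure := &server.IndexedVerificationFailureError{
        Message: "one or more items failed verification",
        Code:    400,
        Failures: []*server.IndexedVerificationFailure{
            {Index: 2, Message: "invalid signature"},
        },
    }
    b, _ := json.Marshal(failure)
    fmt.Println(string(b))
    // Prints: {"message":"one or more items failed verification","code":400,"failures":[{"index":2,"message":"invalid signature"}]}
}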
16
api/server/error_test.go
Normal file
@@ -0,0 +1,16 @@
package server

import (
    "errors"
    "testing"

    "github.com/prysmaticlabs/prysm/v5/testing/assert"
)

func TestDecodeError(t *testing.T) {
    e := errors.New("not a number")
    de := NewDecodeError(e, "Z")
    de = NewDecodeError(de, "Y")
    de = NewDecodeError(de, "X")
    assert.Equal(t, "could not decode X.Y.Z: not a number", de.Error())
}
@@ -2,8 +2,12 @@ package server

import (
    "net/http"

    "github.com/gorilla/mux"
    "github.com/rs/cors"
)

// NormalizeQueryValuesHandler normalizes an input query of "key=value1,value2,value3" to "key=value1&key=value2&key=value3"
func NormalizeQueryValuesHandler(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        query := r.URL.Query()
@@ -13,3 +17,16 @@ func NormalizeQueryValuesHandler(next http.Handler) http.Handler {
        next.ServeHTTP(w, r)
    })
}
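A hedged usage sketch, not part of the diff, relying only on the behavior documented in the handler's comment; the query key and values are illustrative:
package server_test

import (
    "fmt"
    "net/http"
    "net/http/httptest"

    "github.com/prysmaticlabs/prysm/v5/api/server"
)

func ExampleNormalizeQueryValuesHandler() {
    // The inner handler observes the already-normalized query values.
    inner := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        fmt.Println(r.URL.Query()["id"])
    })
    h := server.NormalizeQueryValuesHandler(inner)

    req := httptest.NewRequest(http.MethodGet, "/states?id=a,b,c", nil)
    h.ServeHTTP(httptest.NewRecorder(), req)
    // Expected, per the handler's doc comment: [a b c]
}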

// CorsHandler sets the cors settings on api endpoints
func CorsHandler(allowOrigins []string) mux.MiddlewareFunc {
    c := cors.New(cors.Options{
        AllowedOrigins:   allowOrigins,
        AllowedMethods:   []string{http.MethodPost, http.MethodGet, http.MethodDelete, http.MethodOptions},
        AllowCredentials: true,
        MaxAge:           600,
        AllowedHeaders:   []string{"*"},
    })

    return c.Handler
}
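A brief sketch, not part of the diff, of attaching this middleware to a gorilla/mux router; the origin and port are placeholders:
package main

import (
    "net/http"

    "github.com/gorilla/mux"
    "github.com/prysmaticlabs/prysm/v5/api/server"
)

func main() {
    r := mux.NewRouter()
    // mux.Router.Use accepts a MiddlewareFunc, so CorsHandler can be attached directly.
    r.Use(server.CorsHandler([]string{"http://localhost:4200"}))
    // Alternatively, wrap an existing handler the way the gateway's Start() does:
    // handler := server.CorsHandler(origins).Middleware(r)
    _ = http.ListenAndServe(":8080", r)
}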
|
||||
|
||||
@@ -5,7 +5,7 @@ import (
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/v4/testing/require"
|
||||
"github.com/prysmaticlabs/prysm/v5/testing/require"
|
||||
)
|
||||
|
||||
func TestNormalizeQueryValuesHandler(t *testing.T) {
|
||||
|
||||
50
api/server/structs/BUILD.bazel
Normal file
@@ -0,0 +1,50 @@
|
||||
load("@prysm//tools/go:def.bzl", "go_library", "go_test")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"block.go",
|
||||
"conversions.go",
|
||||
"conversions_block.go",
|
||||
"conversions_state.go",
|
||||
"endpoints_beacon.go",
|
||||
"endpoints_blob.go",
|
||||
"endpoints_builder.go",
|
||||
"endpoints_config.go",
|
||||
"endpoints_debug.go",
|
||||
"endpoints_events.go",
|
||||
"endpoints_lightclient.go",
|
||||
"endpoints_node.go",
|
||||
"endpoints_rewards.go",
|
||||
"endpoints_validator.go",
|
||||
"other.go",
|
||||
"state.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/v5/api/server/structs",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//api/server:go_default_library",
|
||||
"//beacon-chain/state:go_default_library",
|
||||
"//config/fieldparams:go_default_library",
|
||||
"//consensus-types/primitives:go_default_library",
|
||||
"//consensus-types/validator:go_default_library",
|
||||
"//container/slice:go_default_library",
|
||||
"//encoding/bytesutil:go_default_library",
|
||||
"//math:go_default_library",
|
||||
"//proto/engine/v1:go_default_library",
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"@com_github_ethereum_go_ethereum//common:go_default_library",
|
||||
"@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
|
||||
"@com_github_pkg_errors//:go_default_library",
|
||||
],
|
||||
)
|
||||
|
||||
go_test(
|
||||
name = "go_default_test",
|
||||
srcs = ["conversions_test.go"],
|
||||
embed = [":go_default_library"],
|
||||
deps = [
|
||||
"//proto/prysm/v1alpha1:go_default_library",
|
||||
"//testing/require:go_default_library",
|
||||
],
|
||||
)
|
||||
353
api/server/structs/block.go
Normal file
@@ -0,0 +1,353 @@
|
||||
package structs
|
||||
|
||||
type SignedBeaconBlock struct {
|
||||
Message *BeaconBlock `json:"message"`
|
||||
Signature string `json:"signature"`
|
||||
}
|
||||
|
||||
type BeaconBlock struct {
|
||||
Slot string `json:"slot"`
|
||||
ProposerIndex string `json:"proposer_index"`
|
||||
ParentRoot string `json:"parent_root"`
|
||||
StateRoot string `json:"state_root"`
|
||||
Body *BeaconBlockBody `json:"body"`
|
||||
}
|
||||
|
||||
type BeaconBlockBody struct {
|
||||
RandaoReveal string `json:"randao_reveal"`
|
||||
Eth1Data *Eth1Data `json:"eth1_data"`
|
||||
Graffiti string `json:"graffiti"`
|
||||
ProposerSlashings []*ProposerSlashing `json:"proposer_slashings"`
|
||||
AttesterSlashings []*AttesterSlashing `json:"attester_slashings"`
|
||||
Attestations []*Attestation `json:"attestations"`
|
||||
Deposits []*Deposit `json:"deposits"`
|
||||
VoluntaryExits []*SignedVoluntaryExit `json:"voluntary_exits"`
|
||||
}
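For reference, a minimal sketch (not part of the diff) of how these structs serialize: JSON keys are snake_case and numeric fields are strings, per the tags above; the values below are placeholders:
package structs_test

import (
    "encoding/json"
    "fmt"

    "github.com/prysmaticlabs/prysm/v5/api/server/structs"
)

func ExampleSignedBeaconBlock_json() {
    // Placeholder values; real blocks carry 0x-prefixed hex strings and full bodies.
    blk := &structs.SignedBeaconBlock{
        Message: &structs.BeaconBlock{
            Slot:          "1",
            ProposerIndex: "7",
            ParentRoot:    "0x00",
            StateRoot:     "0x00",
        },
        Signature: "0x00",
    }
    b, err := json.Marshal(blk)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(b))
    // The omitted Body field is encoded as "body":null, matching the struct tags above.
}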
|
||||
|
||||
type SignedBeaconBlockAltair struct {
|
||||
Message *BeaconBlockAltair `json:"message"`
|
||||
Signature string `json:"signature"`
|
||||
}
|
||||
|
||||
type BeaconBlockAltair struct {
|
||||
Slot string `json:"slot"`
|
||||
ProposerIndex string `json:"proposer_index"`
|
||||
ParentRoot string `json:"parent_root"`
|
||||
StateRoot string `json:"state_root"`
|
||||
Body *BeaconBlockBodyAltair `json:"body"`
|
||||
}
|
||||
|
||||
type BeaconBlockBodyAltair struct {
|
||||
RandaoReveal string `json:"randao_reveal"`
|
||||
Eth1Data *Eth1Data `json:"eth1_data"`
|
||||
Graffiti string `json:"graffiti"`
|
||||
ProposerSlashings []*ProposerSlashing `json:"proposer_slashings"`
|
||||
AttesterSlashings []*AttesterSlashing `json:"attester_slashings"`
|
||||
Attestations []*Attestation `json:"attestations"`
|
||||
Deposits []*Deposit `json:"deposits"`
|
||||
VoluntaryExits []*SignedVoluntaryExit `json:"voluntary_exits"`
|
||||
SyncAggregate *SyncAggregate `json:"sync_aggregate"`
|
||||
}
|
||||
|
||||
type SignedBeaconBlockBellatrix struct {
|
||||
Message *BeaconBlockBellatrix `json:"message"`
|
||||
Signature string `json:"signature"`
|
||||
}
|
||||
|
||||
type BeaconBlockBellatrix struct {
|
||||
Slot string `json:"slot"`
|
||||
ProposerIndex string `json:"proposer_index"`
|
||||
ParentRoot string `json:"parent_root"`
|
||||
StateRoot string `json:"state_root"`
|
||||
Body *BeaconBlockBodyBellatrix `json:"body"`
|
||||
}
|
||||
|
||||
type BeaconBlockBodyBellatrix struct {
|
||||
RandaoReveal string `json:"randao_reveal"`
|
||||
Eth1Data *Eth1Data `json:"eth1_data"`
|
||||
Graffiti string `json:"graffiti"`
|
||||
ProposerSlashings []*ProposerSlashing `json:"proposer_slashings"`
|
||||
AttesterSlashings []*AttesterSlashing `json:"attester_slashings"`
|
||||
Attestations []*Attestation `json:"attestations"`
|
||||
Deposits []*Deposit `json:"deposits"`
|
||||
VoluntaryExits []*SignedVoluntaryExit `json:"voluntary_exits"`
|
||||
SyncAggregate *SyncAggregate `json:"sync_aggregate"`
|
||||
ExecutionPayload *ExecutionPayload `json:"execution_payload"`
|
||||
}
|
||||
|
||||
type SignedBlindedBeaconBlockBellatrix struct {
|
||||
Message *BlindedBeaconBlockBellatrix `json:"message"`
|
||||
Signature string `json:"signature"`
|
||||
}
|
||||
|
||||
type BlindedBeaconBlockBellatrix struct {
|
||||
Slot string `json:"slot"`
|
||||
ProposerIndex string `json:"proposer_index"`
|
||||
ParentRoot string `json:"parent_root"`
|
||||
StateRoot string `json:"state_root"`
|
||||
Body *BlindedBeaconBlockBodyBellatrix `json:"body"`
|
||||
}
|
||||
|
||||
type BlindedBeaconBlockBodyBellatrix struct {
|
||||
RandaoReveal string `json:"randao_reveal"`
|
||||
Eth1Data *Eth1Data `json:"eth1_data"`
|
||||
Graffiti string `json:"graffiti"`
|
||||
ProposerSlashings []*ProposerSlashing `json:"proposer_slashings"`
|
||||
AttesterSlashings []*AttesterSlashing `json:"attester_slashings"`
|
||||
Attestations []*Attestation `json:"attestations"`
|
||||
Deposits []*Deposit `json:"deposits"`
|
||||
VoluntaryExits []*SignedVoluntaryExit `json:"voluntary_exits"`
|
||||
SyncAggregate *SyncAggregate `json:"sync_aggregate"`
|
||||
ExecutionPayloadHeader *ExecutionPayloadHeader `json:"execution_payload_header"`
|
||||
}
|
||||
|
||||
type SignedBeaconBlockCapella struct {
|
||||
Message *BeaconBlockCapella `json:"message"`
|
||||
Signature string `json:"signature"`
|
||||
}
|
||||
|
||||
type BeaconBlockCapella struct {
|
||||
Slot string `json:"slot"`
|
||||
ProposerIndex string `json:"proposer_index"`
|
||||
ParentRoot string `json:"parent_root"`
|
||||
StateRoot string `json:"state_root"`
|
||||
Body *BeaconBlockBodyCapella `json:"body"`
|
||||
}
|
||||
|
||||
type BeaconBlockBodyCapella struct {
|
||||
RandaoReveal string `json:"randao_reveal"`
|
||||
Eth1Data *Eth1Data `json:"eth1_data"`
|
||||
Graffiti string `json:"graffiti"`
|
||||
ProposerSlashings []*ProposerSlashing `json:"proposer_slashings"`
|
||||
AttesterSlashings []*AttesterSlashing `json:"attester_slashings"`
|
||||
Attestations []*Attestation `json:"attestations"`
|
||||
Deposits []*Deposit `json:"deposits"`
|
||||
VoluntaryExits []*SignedVoluntaryExit `json:"voluntary_exits"`
|
||||
SyncAggregate *SyncAggregate `json:"sync_aggregate"`
|
||||
ExecutionPayload *ExecutionPayloadCapella `json:"execution_payload"`
|
||||
BLSToExecutionChanges []*SignedBLSToExecutionChange `json:"bls_to_execution_changes"`
|
||||
}
|
||||
|
||||
type SignedBlindedBeaconBlockCapella struct {
|
||||
Message *BlindedBeaconBlockCapella `json:"message"`
|
||||
Signature string `json:"signature"`
|
||||
}
|
||||
|
||||
type BlindedBeaconBlockCapella struct {
|
||||
Slot string `json:"slot"`
|
||||
ProposerIndex string `json:"proposer_index"`
|
||||
ParentRoot string `json:"parent_root"`
|
||||
StateRoot string `json:"state_root"`
|
||||
Body *BlindedBeaconBlockBodyCapella `json:"body"`
|
||||
}
|
||||
|
||||
type BlindedBeaconBlockBodyCapella struct {
|
||||
RandaoReveal string `json:"randao_reveal"`
|
||||
Eth1Data *Eth1Data `json:"eth1_data"`
|
||||
Graffiti string `json:"graffiti"`
|
||||
ProposerSlashings []*ProposerSlashing `json:"proposer_slashings"`
|
||||
AttesterSlashings []*AttesterSlashing `json:"attester_slashings"`
|
||||
Attestations []*Attestation `json:"attestations"`
|
||||
Deposits []*Deposit `json:"deposits"`
|
||||
VoluntaryExits []*SignedVoluntaryExit `json:"voluntary_exits"`
|
||||
SyncAggregate *SyncAggregate `json:"sync_aggregate"`
|
||||
ExecutionPayloadHeader *ExecutionPayloadHeaderCapella `json:"execution_payload_header"`
|
||||
BLSToExecutionChanges []*SignedBLSToExecutionChange `json:"bls_to_execution_changes"`
|
||||
}
|
||||
|
||||
type SignedBeaconBlockContentsDeneb struct {
|
||||
SignedBlock *SignedBeaconBlockDeneb `json:"signed_block"`
|
||||
KzgProofs []string `json:"kzg_proofs"`
|
||||
Blobs []string `json:"blobs"`
|
||||
}
|
||||
|
||||
type BeaconBlockContentsDeneb struct {
|
||||
Block *BeaconBlockDeneb `json:"block"`
|
||||
KzgProofs []string `json:"kzg_proofs"`
|
||||
Blobs []string `json:"blobs"`
|
||||
}
|
||||
|
||||
type SignedBeaconBlockDeneb struct {
|
||||
Message *BeaconBlockDeneb `json:"message"`
|
||||
Signature string `json:"signature"`
|
||||
}
|
||||
|
||||
type BeaconBlockDeneb struct {
|
||||
Slot string `json:"slot"`
|
||||
ProposerIndex string `json:"proposer_index"`
|
||||
ParentRoot string `json:"parent_root"`
|
||||
StateRoot string `json:"state_root"`
|
||||
Body *BeaconBlockBodyDeneb `json:"body"`
|
||||
}
|
||||
|
||||
type BeaconBlockBodyDeneb struct {
|
||||
RandaoReveal string `json:"randao_reveal"`
|
||||
Eth1Data *Eth1Data `json:"eth1_data"`
|
||||
Graffiti string `json:"graffiti"`
|
||||
ProposerSlashings []*ProposerSlashing `json:"proposer_slashings"`
|
||||
AttesterSlashings []*AttesterSlashing `json:"attester_slashings"`
|
||||
Attestations []*Attestation `json:"attestations"`
|
||||
Deposits []*Deposit `json:"deposits"`
|
||||
VoluntaryExits []*SignedVoluntaryExit `json:"voluntary_exits"`
|
||||
SyncAggregate *SyncAggregate `json:"sync_aggregate"`
|
||||
ExecutionPayload *ExecutionPayloadDeneb `json:"execution_payload"`
|
||||
BLSToExecutionChanges []*SignedBLSToExecutionChange `json:"bls_to_execution_changes"`
|
||||
BlobKzgCommitments []string `json:"blob_kzg_commitments"`
|
||||
}
|
||||
|
||||
type BlindedBeaconBlockDeneb struct {
|
||||
Slot string `json:"slot"`
|
||||
ProposerIndex string `json:"proposer_index"`
|
||||
ParentRoot string `json:"parent_root"`
|
||||
StateRoot string `json:"state_root"`
|
||||
Body *BlindedBeaconBlockBodyDeneb `json:"body"`
|
||||
}
|
||||
|
||||
type SignedBlindedBeaconBlockDeneb struct {
|
||||
Message *BlindedBeaconBlockDeneb `json:"message"`
|
||||
Signature string `json:"signature"`
|
||||
}
|
||||
|
||||
type BlindedBeaconBlockBodyDeneb struct {
|
||||
RandaoReveal string `json:"randao_reveal"`
|
||||
Eth1Data *Eth1Data `json:"eth1_data"`
|
||||
Graffiti string `json:"graffiti"`
|
||||
ProposerSlashings []*ProposerSlashing `json:"proposer_slashings"`
|
||||
AttesterSlashings []*AttesterSlashing `json:"attester_slashings"`
|
||||
Attestations []*Attestation `json:"attestations"`
|
||||
Deposits []*Deposit `json:"deposits"`
|
||||
VoluntaryExits []*SignedVoluntaryExit `json:"voluntary_exits"`
|
||||
SyncAggregate *SyncAggregate `json:"sync_aggregate"`
|
||||
ExecutionPayloadHeader *ExecutionPayloadHeaderDeneb `json:"execution_payload_header"`
|
||||
BLSToExecutionChanges []*SignedBLSToExecutionChange `json:"bls_to_execution_changes"`
|
||||
BlobKzgCommitments []string `json:"blob_kzg_commitments"`
|
||||
}
|
||||
|
||||
type SignedBeaconBlockHeaderContainer struct {
|
||||
Header *SignedBeaconBlockHeader `json:"header"`
|
||||
Root string `json:"root"`
|
||||
Canonical bool `json:"canonical"`
|
||||
}
|
||||
|
||||
type SignedBeaconBlockHeader struct {
|
||||
Message *BeaconBlockHeader `json:"message"`
|
||||
Signature string `json:"signature"`
|
||||
}
|
||||
|
||||
type BeaconBlockHeader struct {
|
||||
Slot string `json:"slot"`
|
||||
ProposerIndex string `json:"proposer_index"`
|
||||
ParentRoot string `json:"parent_root"`
|
||||
StateRoot string `json:"state_root"`
|
||||
BodyRoot string `json:"body_root"`
|
||||
}
|
||||
|
||||
type ExecutionPayload struct {
|
||||
ParentHash string `json:"parent_hash"`
|
||||
FeeRecipient string `json:"fee_recipient"`
|
||||
StateRoot string `json:"state_root"`
|
||||
ReceiptsRoot string `json:"receipts_root"`
|
||||
LogsBloom string `json:"logs_bloom"`
|
||||
PrevRandao string `json:"prev_randao"`
|
||||
BlockNumber string `json:"block_number"`
|
||||
GasLimit string `json:"gas_limit"`
|
||||
GasUsed string `json:"gas_used"`
|
||||
Timestamp string `json:"timestamp"`
|
||||
ExtraData string `json:"extra_data"`
|
||||
BaseFeePerGas string `json:"base_fee_per_gas"`
|
||||
BlockHash string `json:"block_hash"`
|
||||
Transactions []string `json:"transactions"`
|
||||
}
|
||||
|
||||
type ExecutionPayloadHeader struct {
|
||||
ParentHash string `json:"parent_hash"`
|
||||
FeeRecipient string `json:"fee_recipient"`
|
||||
StateRoot string `json:"state_root"`
|
||||
ReceiptsRoot string `json:"receipts_root"`
|
||||
LogsBloom string `json:"logs_bloom"`
|
||||
PrevRandao string `json:"prev_randao"`
|
||||
BlockNumber string `json:"block_number"`
|
||||
GasLimit string `json:"gas_limit"`
|
||||
GasUsed string `json:"gas_used"`
|
||||
Timestamp string `json:"timestamp"`
|
||||
ExtraData string `json:"extra_data"`
|
||||
BaseFeePerGas string `json:"base_fee_per_gas"`
|
||||
BlockHash string `json:"block_hash"`
|
||||
TransactionsRoot string `json:"transactions_root"`
|
||||
}
|
||||
|
||||
type ExecutionPayloadCapella struct {
|
||||
ParentHash string `json:"parent_hash"`
|
||||
FeeRecipient string `json:"fee_recipient"`
|
||||
StateRoot string `json:"state_root"`
|
||||
ReceiptsRoot string `json:"receipts_root"`
|
||||
LogsBloom string `json:"logs_bloom"`
|
||||
PrevRandao string `json:"prev_randao"`
|
||||
BlockNumber string `json:"block_number"`
|
||||
GasLimit string `json:"gas_limit"`
|
||||
GasUsed string `json:"gas_used"`
|
||||
Timestamp string `json:"timestamp"`
|
||||
ExtraData string `json:"extra_data"`
|
||||
BaseFeePerGas string `json:"base_fee_per_gas"`
|
||||
BlockHash string `json:"block_hash"`
|
||||
Transactions []string `json:"transactions"`
|
||||
Withdrawals []*Withdrawal `json:"withdrawals"`
|
||||
}
|
||||
|
||||
type ExecutionPayloadHeaderCapella struct {
|
||||
ParentHash string `json:"parent_hash"`
|
||||
FeeRecipient string `json:"fee_recipient"`
|
||||
StateRoot string `json:"state_root"`
|
||||
ReceiptsRoot string `json:"receipts_root"`
|
||||
LogsBloom string `json:"logs_bloom"`
|
||||
PrevRandao string `json:"prev_randao"`
|
||||
BlockNumber string `json:"block_number"`
|
||||
GasLimit string `json:"gas_limit"`
|
||||
GasUsed string `json:"gas_used"`
|
||||
Timestamp string `json:"timestamp"`
|
||||
ExtraData string `json:"extra_data"`
|
||||
BaseFeePerGas string `json:"base_fee_per_gas"`
|
||||
BlockHash string `json:"block_hash"`
|
||||
TransactionsRoot string `json:"transactions_root"`
|
||||
WithdrawalsRoot string `json:"withdrawals_root"`
|
||||
}
|
||||
|
||||
type ExecutionPayloadDeneb struct {
|
||||
ParentHash string `json:"parent_hash"`
|
||||
FeeRecipient string `json:"fee_recipient"`
|
||||
StateRoot string `json:"state_root"`
|
||||
ReceiptsRoot string `json:"receipts_root"`
|
||||
LogsBloom string `json:"logs_bloom"`
|
||||
PrevRandao string `json:"prev_randao"`
|
||||
BlockNumber string `json:"block_number"`
|
||||
GasLimit string `json:"gas_limit"`
|
||||
GasUsed string `json:"gas_used"`
|
||||
Timestamp string `json:"timestamp"`
|
||||
ExtraData string `json:"extra_data"`
|
||||
BaseFeePerGas string `json:"base_fee_per_gas"`
|
||||
BlockHash string `json:"block_hash"`
|
||||
Transactions []string `json:"transactions"`
|
||||
Withdrawals []*Withdrawal `json:"withdrawals"`
|
||||
BlobGasUsed string `json:"blob_gas_used"`
|
||||
ExcessBlobGas string `json:"excess_blob_gas"`
|
||||
}
|
||||
|
||||
type ExecutionPayloadHeaderDeneb struct {
|
||||
ParentHash string `json:"parent_hash"`
|
||||
FeeRecipient string `json:"fee_recipient"`
|
||||
StateRoot string `json:"state_root"`
|
||||
ReceiptsRoot string `json:"receipts_root"`
|
||||
LogsBloom string `json:"logs_bloom"`
|
||||
PrevRandao string `json:"prev_randao"`
|
||||
BlockNumber string `json:"block_number"`
|
||||
GasLimit string `json:"gas_limit"`
|
||||
GasUsed string `json:"gas_used"`
|
||||
Timestamp string `json:"timestamp"`
|
||||
ExtraData string `json:"extra_data"`
|
||||
BaseFeePerGas string `json:"base_fee_per_gas"`
|
||||
BlockHash string `json:"block_hash"`
|
||||
TransactionsRoot string `json:"transactions_root"`
|
||||
WithdrawalsRoot string `json:"withdrawals_root"`
|
||||
BlobGasUsed string `json:"blob_gas_used"`
|
||||
ExcessBlobGas string `json:"excess_blob_gas"`
|
||||
}
|
||||
1090
api/server/structs/conversions.go
Normal file
File diff suppressed because it is too large
2342
api/server/structs/conversions_block.go
Normal file
File diff suppressed because it is too large
595
api/server/structs/conversions_state.go
Normal file
@@ -0,0 +1,595 @@
|
||||
package structs
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
beaconState "github.com/prysmaticlabs/prysm/v5/beacon-chain/state"
|
||||
enginev1 "github.com/prysmaticlabs/prysm/v5/proto/engine/v1"
|
||||
)
|
||||
|
||||
var errPayloadHeaderNotFound = errors.New("expected payload header not found")
|
||||
|
||||
func BeaconStateFromConsensus(st beaconState.BeaconState) (*BeaconState, error) {
|
||||
srcBr := st.BlockRoots()
|
||||
br := make([]string, len(srcBr))
|
||||
for i, r := range srcBr {
|
||||
br[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcSr := st.StateRoots()
|
||||
sr := make([]string, len(srcSr))
|
||||
for i, r := range srcSr {
|
||||
sr[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcHr, err := st.HistoricalRoots()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
hr := make([]string, len(srcHr))
|
||||
for i, r := range srcHr {
|
||||
hr[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcVotes := st.Eth1DataVotes()
|
||||
votes := make([]*Eth1Data, len(srcVotes))
|
||||
for i, e := range srcVotes {
|
||||
votes[i] = Eth1DataFromConsensus(e)
|
||||
}
|
||||
srcVals := st.Validators()
|
||||
vals := make([]*Validator, len(srcVals))
|
||||
for i, v := range srcVals {
|
||||
vals[i] = ValidatorFromConsensus(v)
|
||||
}
|
||||
srcBals := st.Balances()
|
||||
bals := make([]string, len(srcBals))
|
||||
for i, b := range srcBals {
|
||||
bals[i] = fmt.Sprintf("%d", b)
|
||||
}
|
||||
srcRm := st.RandaoMixes()
|
||||
rm := make([]string, len(srcRm))
|
||||
for i, m := range srcRm {
|
||||
rm[i] = hexutil.Encode(m)
|
||||
}
|
||||
srcSlashings := st.Slashings()
|
||||
slashings := make([]string, len(srcSlashings))
|
||||
for i, s := range srcSlashings {
|
||||
slashings[i] = fmt.Sprintf("%d", s)
|
||||
}
|
||||
srcPrevAtts, err := st.PreviousEpochAttestations()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
prevAtts := make([]*PendingAttestation, len(srcPrevAtts))
|
||||
for i, a := range srcPrevAtts {
|
||||
prevAtts[i] = PendingAttestationFromConsensus(a)
|
||||
}
|
||||
srcCurrAtts, err := st.CurrentEpochAttestations()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
currAtts := make([]*PendingAttestation, len(srcCurrAtts))
|
||||
for i, a := range srcCurrAtts {
|
||||
currAtts[i] = PendingAttestationFromConsensus(a)
|
||||
}
|
||||
|
||||
return &BeaconState{
|
||||
GenesisTime: fmt.Sprintf("%d", st.GenesisTime()),
|
||||
GenesisValidatorsRoot: hexutil.Encode(st.GenesisValidatorsRoot()),
|
||||
Slot: fmt.Sprintf("%d", st.Slot()),
|
||||
Fork: ForkFromConsensus(st.Fork()),
|
||||
LatestBlockHeader: BeaconBlockHeaderFromConsensus(st.LatestBlockHeader()),
|
||||
BlockRoots: br,
|
||||
StateRoots: sr,
|
||||
HistoricalRoots: hr,
|
||||
Eth1Data: Eth1DataFromConsensus(st.Eth1Data()),
|
||||
Eth1DataVotes: votes,
|
||||
Eth1DepositIndex: fmt.Sprintf("%d", st.Eth1DepositIndex()),
|
||||
Validators: vals,
|
||||
Balances: bals,
|
||||
RandaoMixes: rm,
|
||||
Slashings: slashings,
|
||||
PreviousEpochAttestations: prevAtts,
|
||||
CurrentEpochAttestations: currAtts,
|
||||
JustificationBits: hexutil.Encode(st.JustificationBits()),
|
||||
PreviousJustifiedCheckpoint: CheckpointFromConsensus(st.PreviousJustifiedCheckpoint()),
|
||||
CurrentJustifiedCheckpoint: CheckpointFromConsensus(st.CurrentJustifiedCheckpoint()),
|
||||
FinalizedCheckpoint: CheckpointFromConsensus(st.FinalizedCheckpoint()),
|
||||
}, nil
|
||||
}
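The conversion above applies one convention throughout: byte slices become 0x-prefixed hex via hexutil.Encode and unsigned integers become base-10 strings. A tiny standalone sketch of that convention, with placeholder values:
package main

import (
    "fmt"

    "github.com/ethereum/go-ethereum/common/hexutil"
)

func main() {
    root := make([]byte, 32) // a zeroed 32-byte root, illustrative only
    var slot uint64 = 123456

    fmt.Println(hexutil.Encode(root)) // "0x" followed by 64 zero hex characters
    fmt.Printf("%d\n", slot)          // "123456"
}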
|
||||
|
||||
func BeaconStateAltairFromConsensus(st beaconState.BeaconState) (*BeaconStateAltair, error) {
|
||||
srcBr := st.BlockRoots()
|
||||
br := make([]string, len(srcBr))
|
||||
for i, r := range srcBr {
|
||||
br[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcSr := st.StateRoots()
|
||||
sr := make([]string, len(srcSr))
|
||||
for i, r := range srcSr {
|
||||
sr[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcHr, err := st.HistoricalRoots()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
hr := make([]string, len(srcHr))
|
||||
for i, r := range srcHr {
|
||||
hr[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcVotes := st.Eth1DataVotes()
|
||||
votes := make([]*Eth1Data, len(srcVotes))
|
||||
for i, e := range srcVotes {
|
||||
votes[i] = Eth1DataFromConsensus(e)
|
||||
}
|
||||
srcVals := st.Validators()
|
||||
vals := make([]*Validator, len(srcVals))
|
||||
for i, v := range srcVals {
|
||||
vals[i] = ValidatorFromConsensus(v)
|
||||
}
|
||||
srcBals := st.Balances()
|
||||
bals := make([]string, len(srcBals))
|
||||
for i, b := range srcBals {
|
||||
bals[i] = fmt.Sprintf("%d", b)
|
||||
}
|
||||
srcRm := st.RandaoMixes()
|
||||
rm := make([]string, len(srcRm))
|
||||
for i, m := range srcRm {
|
||||
rm[i] = hexutil.Encode(m)
|
||||
}
|
||||
srcSlashings := st.Slashings()
|
||||
slashings := make([]string, len(srcSlashings))
|
||||
for i, s := range srcSlashings {
|
||||
slashings[i] = fmt.Sprintf("%d", s)
|
||||
}
|
||||
srcPrevPart, err := st.PreviousEpochParticipation()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
prevPart := make([]string, len(srcPrevPart))
|
||||
for i, p := range srcPrevPart {
|
||||
prevPart[i] = fmt.Sprintf("%d", p)
|
||||
}
|
||||
srcCurrPart, err := st.CurrentEpochParticipation()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
currPart := make([]string, len(srcCurrPart))
|
||||
for i, p := range srcCurrPart {
|
||||
currPart[i] = fmt.Sprintf("%d", p)
|
||||
}
|
||||
srcIs, err := st.InactivityScores()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
is := make([]string, len(srcIs))
|
||||
for i, s := range srcIs {
|
||||
is[i] = fmt.Sprintf("%d", s)
|
||||
}
|
||||
currSc, err := st.CurrentSyncCommittee()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
nextSc, err := st.NextSyncCommittee()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &BeaconStateAltair{
|
||||
GenesisTime: fmt.Sprintf("%d", st.GenesisTime()),
|
||||
GenesisValidatorsRoot: hexutil.Encode(st.GenesisValidatorsRoot()),
|
||||
Slot: fmt.Sprintf("%d", st.Slot()),
|
||||
Fork: ForkFromConsensus(st.Fork()),
|
||||
LatestBlockHeader: BeaconBlockHeaderFromConsensus(st.LatestBlockHeader()),
|
||||
BlockRoots: br,
|
||||
StateRoots: sr,
|
||||
HistoricalRoots: hr,
|
||||
Eth1Data: Eth1DataFromConsensus(st.Eth1Data()),
|
||||
Eth1DataVotes: votes,
|
||||
Eth1DepositIndex: fmt.Sprintf("%d", st.Eth1DepositIndex()),
|
||||
Validators: vals,
|
||||
Balances: bals,
|
||||
RandaoMixes: rm,
|
||||
Slashings: slashings,
|
||||
PreviousEpochParticipation: prevPart,
|
||||
CurrentEpochParticipation: currPart,
|
||||
JustificationBits: hexutil.Encode(st.JustificationBits()),
|
||||
PreviousJustifiedCheckpoint: CheckpointFromConsensus(st.PreviousJustifiedCheckpoint()),
|
||||
CurrentJustifiedCheckpoint: CheckpointFromConsensus(st.CurrentJustifiedCheckpoint()),
|
||||
FinalizedCheckpoint: CheckpointFromConsensus(st.FinalizedCheckpoint()),
|
||||
InactivityScores: is,
|
||||
CurrentSyncCommittee: SyncCommitteeFromConsensus(currSc),
|
||||
NextSyncCommittee: SyncCommitteeFromConsensus(nextSc),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func BeaconStateBellatrixFromConsensus(st beaconState.BeaconState) (*BeaconStateBellatrix, error) {
|
||||
srcBr := st.BlockRoots()
|
||||
br := make([]string, len(srcBr))
|
||||
for i, r := range srcBr {
|
||||
br[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcSr := st.StateRoots()
|
||||
sr := make([]string, len(srcSr))
|
||||
for i, r := range srcSr {
|
||||
sr[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcHr, err := st.HistoricalRoots()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
hr := make([]string, len(srcHr))
|
||||
for i, r := range srcHr {
|
||||
hr[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcVotes := st.Eth1DataVotes()
|
||||
votes := make([]*Eth1Data, len(srcVotes))
|
||||
for i, e := range srcVotes {
|
||||
votes[i] = Eth1DataFromConsensus(e)
|
||||
}
|
||||
srcVals := st.Validators()
|
||||
vals := make([]*Validator, len(srcVals))
|
||||
for i, v := range srcVals {
|
||||
vals[i] = ValidatorFromConsensus(v)
|
||||
}
|
||||
srcBals := st.Balances()
|
||||
bals := make([]string, len(srcBals))
|
||||
for i, b := range srcBals {
|
||||
bals[i] = fmt.Sprintf("%d", b)
|
||||
}
|
||||
srcRm := st.RandaoMixes()
|
||||
rm := make([]string, len(srcRm))
|
||||
for i, m := range srcRm {
|
||||
rm[i] = hexutil.Encode(m)
|
||||
}
|
||||
srcSlashings := st.Slashings()
|
||||
slashings := make([]string, len(srcSlashings))
|
||||
for i, s := range srcSlashings {
|
||||
slashings[i] = fmt.Sprintf("%d", s)
|
||||
}
|
||||
srcPrevPart, err := st.PreviousEpochParticipation()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
prevPart := make([]string, len(srcPrevPart))
|
||||
for i, p := range srcPrevPart {
|
||||
prevPart[i] = fmt.Sprintf("%d", p)
|
||||
}
|
||||
srcCurrPart, err := st.CurrentEpochParticipation()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
currPart := make([]string, len(srcCurrPart))
|
||||
for i, p := range srcCurrPart {
|
||||
currPart[i] = fmt.Sprintf("%d", p)
|
||||
}
|
||||
srcIs, err := st.InactivityScores()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
is := make([]string, len(srcIs))
|
||||
for i, s := range srcIs {
|
||||
is[i] = fmt.Sprintf("%d", s)
|
||||
}
|
||||
currSc, err := st.CurrentSyncCommittee()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
nextSc, err := st.NextSyncCommittee()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
execData, err := st.LatestExecutionPayloadHeader()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
srcPayload, ok := execData.Proto().(*enginev1.ExecutionPayloadHeader)
|
||||
if !ok {
|
||||
return nil, errPayloadHeaderNotFound
|
||||
}
|
||||
payload, err := ExecutionPayloadHeaderFromConsensus(srcPayload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &BeaconStateBellatrix{
|
||||
GenesisTime: fmt.Sprintf("%d", st.GenesisTime()),
|
||||
GenesisValidatorsRoot: hexutil.Encode(st.GenesisValidatorsRoot()),
|
||||
Slot: fmt.Sprintf("%d", st.Slot()),
|
||||
Fork: ForkFromConsensus(st.Fork()),
|
||||
LatestBlockHeader: BeaconBlockHeaderFromConsensus(st.LatestBlockHeader()),
|
||||
BlockRoots: br,
|
||||
StateRoots: sr,
|
||||
HistoricalRoots: hr,
|
||||
Eth1Data: Eth1DataFromConsensus(st.Eth1Data()),
|
||||
Eth1DataVotes: votes,
|
||||
Eth1DepositIndex: fmt.Sprintf("%d", st.Eth1DepositIndex()),
|
||||
Validators: vals,
|
||||
Balances: bals,
|
||||
RandaoMixes: rm,
|
||||
Slashings: slashings,
|
||||
PreviousEpochParticipation: prevPart,
|
||||
CurrentEpochParticipation: currPart,
|
||||
JustificationBits: hexutil.Encode(st.JustificationBits()),
|
||||
PreviousJustifiedCheckpoint: CheckpointFromConsensus(st.PreviousJustifiedCheckpoint()),
|
||||
CurrentJustifiedCheckpoint: CheckpointFromConsensus(st.CurrentJustifiedCheckpoint()),
|
||||
FinalizedCheckpoint: CheckpointFromConsensus(st.FinalizedCheckpoint()),
|
||||
InactivityScores: is,
|
||||
CurrentSyncCommittee: SyncCommitteeFromConsensus(currSc),
|
||||
NextSyncCommittee: SyncCommitteeFromConsensus(nextSc),
|
||||
LatestExecutionPayloadHeader: payload,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func BeaconStateCapellaFromConsensus(st beaconState.BeaconState) (*BeaconStateCapella, error) {
|
||||
srcBr := st.BlockRoots()
|
||||
br := make([]string, len(srcBr))
|
||||
for i, r := range srcBr {
|
||||
br[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcSr := st.StateRoots()
|
||||
sr := make([]string, len(srcSr))
|
||||
for i, r := range srcSr {
|
||||
sr[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcHr, err := st.HistoricalRoots()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
hr := make([]string, len(srcHr))
|
||||
for i, r := range srcHr {
|
||||
hr[i] = hexutil.Encode(r)
|
||||
}
|
||||
srcVotes := st.Eth1DataVotes()
|
||||
votes := make([]*Eth1Data, len(srcVotes))
|
||||
for i, e := range srcVotes {
|
||||
votes[i] = Eth1DataFromConsensus(e)
|
||||
}
|
||||
srcVals := st.Validators()
|
||||
vals := make([]*Validator, len(srcVals))
|
||||
for i, v := range srcVals {
|
||||
vals[i] = ValidatorFromConsensus(v)
|
||||
}
|
||||
srcBals := st.Balances()
|
||||
bals := make([]string, len(srcBals))
|
||||
for i, b := range srcBals {
|
||||
bals[i] = fmt.Sprintf("%d", b)
|
||||
}
|
||||
srcRm := st.RandaoMixes()
|
||||
rm := make([]string, len(srcRm))
|
||||
for i, m := range srcRm {
|
||||
rm[i] = hexutil.Encode(m)
|
||||
}
|
||||
srcSlashings := st.Slashings()
|
||||
slashings := make([]string, len(srcSlashings))
|
||||
for i, s := range srcSlashings {
|
||||
slashings[i] = fmt.Sprintf("%d", s)
|
||||
}
|
||||
srcPrevPart, err := st.PreviousEpochParticipation()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
prevPart := make([]string, len(srcPrevPart))
|
||||
for i, p := range srcPrevPart {
|
||||
prevPart[i] = fmt.Sprintf("%d", p)
|
||||
}
|
||||
srcCurrPart, err := st.CurrentEpochParticipation()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
currPart := make([]string, len(srcCurrPart))
|
||||
for i, p := range srcCurrPart {
|
||||
currPart[i] = fmt.Sprintf("%d", p)
|
||||
}
|
||||
srcIs, err := st.InactivityScores()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
is := make([]string, len(srcIs))
|
||||
for i, s := range srcIs {
|
||||
is[i] = fmt.Sprintf("%d", s)
|
||||
}
|
||||
currSc, err := st.CurrentSyncCommittee()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
nextSc, err := st.NextSyncCommittee()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
execData, err := st.LatestExecutionPayloadHeader()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
srcPayload, ok := execData.Proto().(*enginev1.ExecutionPayloadHeaderCapella)
|
||||
if !ok {
|
||||
return nil, errPayloadHeaderNotFound
|
||||
}
|
||||
payload, err := ExecutionPayloadHeaderCapellaFromConsensus(srcPayload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
srcHs, err := st.HistoricalSummaries()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
hs := make([]*HistoricalSummary, len(srcHs))
|
||||
for i, s := range srcHs {
|
||||
hs[i] = HistoricalSummaryFromConsensus(s)
|
||||
}
|
||||
nwi, err := st.NextWithdrawalIndex()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
nwvi, err := st.NextWithdrawalValidatorIndex()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &BeaconStateCapella{
|
||||
GenesisTime: fmt.Sprintf("%d", st.GenesisTime()),
|
||||
GenesisValidatorsRoot: hexutil.Encode(st.GenesisValidatorsRoot()),
|
||||
Slot: fmt.Sprintf("%d", st.Slot()),
|
||||
Fork: ForkFromConsensus(st.Fork()),
|
||||
LatestBlockHeader: BeaconBlockHeaderFromConsensus(st.LatestBlockHeader()),
|
||||
BlockRoots: br,
|
||||
StateRoots: sr,
|
||||
HistoricalRoots: hr,
|
||||
Eth1Data: Eth1DataFromConsensus(st.Eth1Data()),
|
||||
Eth1DataVotes: votes,
|
||||
Eth1DepositIndex: fmt.Sprintf("%d", st.Eth1DepositIndex()),
|
||||
Validators: vals,
|
||||
Balances: bals,
|
||||
RandaoMixes: rm,
|
||||
Slashings: slashings,
|
||||
PreviousEpochParticipation: prevPart,
|
||||
CurrentEpochParticipation: currPart,
|
||||
JustificationBits: hexutil.Encode(st.JustificationBits()),
|
||||
PreviousJustifiedCheckpoint: CheckpointFromConsensus(st.PreviousJustifiedCheckpoint()),
        CurrentJustifiedCheckpoint: CheckpointFromConsensus(st.CurrentJustifiedCheckpoint()),
        FinalizedCheckpoint: CheckpointFromConsensus(st.FinalizedCheckpoint()),
        InactivityScores: is,
        CurrentSyncCommittee: SyncCommitteeFromConsensus(currSc),
        NextSyncCommittee: SyncCommitteeFromConsensus(nextSc),
        LatestExecutionPayloadHeader: payload,
        NextWithdrawalIndex: fmt.Sprintf("%d", nwi),
        NextWithdrawalValidatorIndex: fmt.Sprintf("%d", nwvi),
        HistoricalSummaries: hs,
    }, nil
}

func BeaconStateDenebFromConsensus(st beaconState.BeaconState) (*BeaconStateDeneb, error) {
    srcBr := st.BlockRoots()
    br := make([]string, len(srcBr))
    for i, r := range srcBr {
        br[i] = hexutil.Encode(r)
    }
    srcSr := st.StateRoots()
    sr := make([]string, len(srcSr))
    for i, r := range srcSr {
        sr[i] = hexutil.Encode(r)
    }
    srcHr, err := st.HistoricalRoots()
    if err != nil {
        return nil, err
    }
    hr := make([]string, len(srcHr))
    for i, r := range srcHr {
        hr[i] = hexutil.Encode(r)
    }
    srcVotes := st.Eth1DataVotes()
    votes := make([]*Eth1Data, len(srcVotes))
    for i, e := range srcVotes {
        votes[i] = Eth1DataFromConsensus(e)
    }
    srcVals := st.Validators()
    vals := make([]*Validator, len(srcVals))
    for i, v := range srcVals {
        vals[i] = ValidatorFromConsensus(v)
    }
    srcBals := st.Balances()
    bals := make([]string, len(srcBals))
    for i, b := range srcBals {
        bals[i] = fmt.Sprintf("%d", b)
    }
    srcRm := st.RandaoMixes()
    rm := make([]string, len(srcRm))
    for i, m := range srcRm {
        rm[i] = hexutil.Encode(m)
    }
    srcSlashings := st.Slashings()
    slashings := make([]string, len(srcSlashings))
    for i, s := range srcSlashings {
        slashings[i] = fmt.Sprintf("%d", s)
    }
    srcPrevPart, err := st.PreviousEpochParticipation()
    if err != nil {
        return nil, err
    }
    prevPart := make([]string, len(srcPrevPart))
    for i, p := range srcPrevPart {
        prevPart[i] = fmt.Sprintf("%d", p)
    }
    srcCurrPart, err := st.CurrentEpochParticipation()
    if err != nil {
        return nil, err
    }
    currPart := make([]string, len(srcCurrPart))
    for i, p := range srcCurrPart {
        currPart[i] = fmt.Sprintf("%d", p)
    }
    srcIs, err := st.InactivityScores()
    if err != nil {
        return nil, err
    }
    is := make([]string, len(srcIs))
    for i, s := range srcIs {
        is[i] = fmt.Sprintf("%d", s)
    }
    currSc, err := st.CurrentSyncCommittee()
    if err != nil {
        return nil, err
    }
    nextSc, err := st.NextSyncCommittee()
    if err != nil {
        return nil, err
    }
    execData, err := st.LatestExecutionPayloadHeader()
    if err != nil {
        return nil, err
    }
    srcPayload, ok := execData.Proto().(*enginev1.ExecutionPayloadHeaderDeneb)
    if !ok {
        return nil, errPayloadHeaderNotFound
    }
    payload, err := ExecutionPayloadHeaderDenebFromConsensus(srcPayload)
    if err != nil {
        return nil, err
    }
    srcHs, err := st.HistoricalSummaries()
    if err != nil {
        return nil, err
    }
    hs := make([]*HistoricalSummary, len(srcHs))
    for i, s := range srcHs {
        hs[i] = HistoricalSummaryFromConsensus(s)
    }
    nwi, err := st.NextWithdrawalIndex()
    if err != nil {
        return nil, err
    }
    nwvi, err := st.NextWithdrawalValidatorIndex()
    if err != nil {
        return nil, err
    }

    return &BeaconStateDeneb{
        GenesisTime: fmt.Sprintf("%d", st.GenesisTime()),
        GenesisValidatorsRoot: hexutil.Encode(st.GenesisValidatorsRoot()),
        Slot: fmt.Sprintf("%d", st.Slot()),
        Fork: ForkFromConsensus(st.Fork()),
        LatestBlockHeader: BeaconBlockHeaderFromConsensus(st.LatestBlockHeader()),
        BlockRoots: br,
        StateRoots: sr,
        HistoricalRoots: hr,
        Eth1Data: Eth1DataFromConsensus(st.Eth1Data()),
        Eth1DataVotes: votes,
        Eth1DepositIndex: fmt.Sprintf("%d", st.Eth1DepositIndex()),
        Validators: vals,
        Balances: bals,
        RandaoMixes: rm,
        Slashings: slashings,
        PreviousEpochParticipation: prevPart,
        CurrentEpochParticipation: currPart,
        JustificationBits: hexutil.Encode(st.JustificationBits()),
        PreviousJustifiedCheckpoint: CheckpointFromConsensus(st.PreviousJustifiedCheckpoint()),
        CurrentJustifiedCheckpoint: CheckpointFromConsensus(st.CurrentJustifiedCheckpoint()),
        FinalizedCheckpoint: CheckpointFromConsensus(st.FinalizedCheckpoint()),
        InactivityScores: is,
        CurrentSyncCommittee: SyncCommitteeFromConsensus(currSc),
        NextSyncCommittee: SyncCommitteeFromConsensus(nextSc),
        LatestExecutionPayloadHeader: payload,
        NextWithdrawalIndex: fmt.Sprintf("%d", nwi),
        NextWithdrawalValidatorIndex: fmt.Sprintf("%d", nwvi),
        HistoricalSummaries: hs,
    }, nil
}
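For orientation, a minimal sketch (not part of the diff above) of how a handler might use this converter: turn a Deneb consensus state into the JSON-friendly struct and marshal it. It assumes it sits in the same package with "encoding/json" imported, and that `st` is obtained elsewhere (e.g. from the node's state fetcher).

// Hypothetical helper for illustration only; assumes "encoding/json" is imported.
func marshalDenebState(st beaconState.BeaconState) ([]byte, error) {
    data, err := BeaconStateDenebFromConsensus(st)
    if err != nil {
        return nil, err
    }
    // Every numeric field has already been rendered as a decimal string and every
    // root as 0x-hex, so a plain JSON encode is all that is left to do.
    return json.Marshal(data)
}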
api/server/structs/conversions_test.go (new file, 26 lines)
@@ -0,0 +1,26 @@
package structs

import (
    "testing"

    eth "github.com/prysmaticlabs/prysm/v5/proto/prysm/v1alpha1"
    "github.com/prysmaticlabs/prysm/v5/testing/require"
)

func TestDepositSnapshotFromConsensus(t *testing.T) {
    ds := &eth.DepositSnapshot{
        Finalized: [][]byte{{0xde, 0xad, 0xbe, 0xef}, {0xca, 0xfe, 0xba, 0xbe}},
        DepositRoot: []byte{0xab, 0xcd},
        DepositCount: 12345,
        ExecutionHash: []byte{0x12, 0x34},
        ExecutionDepth: 67890,
    }

    res := DepositSnapshotFromConsensus(ds)
    require.NotNil(t, res)
    require.DeepEqual(t, []string{"0xdeadbeef", "0xcafebabe"}, res.Finalized)
    require.Equal(t, "0xabcd", res.DepositRoot)
    require.Equal(t, "12345", res.DepositCount)
    require.Equal(t, "0x1234", res.ExecutionBlockHash)
    require.Equal(t, "67890", res.ExecutionBlockHeight)
}
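As a companion to the test above, a minimal sketch (not part of the diff, assuming the same package plus "encoding/json" and "fmt" imports alongside the test imports) of how the converted snapshot serializes; the hex and decimal strings come straight from the assertions in the test.

// Hypothetical helper for illustration only.
func printDepositSnapshotJSON(ds *eth.DepositSnapshot) error {
    snap := DepositSnapshotFromConsensus(ds)
    out, err := json.Marshal(snap)
    if err != nil {
        return err
    }
    // With the fixture above this prints, with field order following the DepositSnapshot struct:
    // {"finalized":["0xdeadbeef","0xcafebabe"],"deposit_root":"0xabcd","deposit_count":"12345","execution_block_hash":"0x1234","execution_block_height":"67890"}
    fmt.Println(string(out))
    return nil
}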
@@ -1,52 +1,45 @@
package beacon
package structs

import (
    "encoding/json"

    "github.com/prysmaticlabs/prysm/v4/beacon-chain/rpc/eth/shared"
)

type BlockRootResponse struct {
    Data *struct {
        Root string `json:"root"`
    } `json:"data"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
    Data *BlockRoot `json:"data"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
}

type BlockRoot struct {
    Root string `json:"root"`
}

type GetCommitteesResponse struct {
    Data []*shared.Committee `json:"data"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
}

type DepositContractResponse struct {
    Data *struct {
        ChainId string `json:"chain_id"`
        Address string `json:"address"`
    } `json:"data"`
    Data []*Committee `json:"data"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
}

type ListAttestationsResponse struct {
    Data []*shared.Attestation `json:"data"`
    Data []*Attestation `json:"data"`
}

type SubmitAttestationsRequest struct {
    Data []*shared.Attestation `json:"data"`
    Data []*Attestation `json:"data"`
}

type ListVoluntaryExitsResponse struct {
    Data []*shared.SignedVoluntaryExit `json:"data"`
    Data []*SignedVoluntaryExit `json:"data"`
}

type SubmitSyncCommitteeSignaturesRequest struct {
    Data []*shared.SyncCommitteeMessage `json:"data"`
    Data []*SyncCommitteeMessage `json:"data"`
}

type GetStateForkResponse struct {
    Data *shared.Fork `json:"data"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
    Data *Fork `json:"data"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
}

type GetFinalityCheckpointsResponse struct {
@@ -56,9 +49,9 @@ type GetFinalityCheckpointsResponse struct {
}

type FinalityCheckpoints struct {
    PreviousJustified *shared.Checkpoint `json:"previous_justified"`
    CurrentJustified *shared.Checkpoint `json:"current_justified"`
    Finalized *shared.Checkpoint `json:"finalized"`
    PreviousJustified *Checkpoint `json:"previous_justified"`
    CurrentJustified *Checkpoint `json:"current_justified"`
    Finalized *Checkpoint `json:"finalized"`
}

type GetGenesisResponse struct {
@@ -72,15 +65,20 @@ type Genesis struct {
}

type GetBlockHeadersResponse struct {
    Data []*shared.SignedBeaconBlockHeaderContainer `json:"data"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
    Data []*SignedBeaconBlockHeaderContainer `json:"data"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
}

type GetBlockHeaderResponse struct {
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
    Data *shared.SignedBeaconBlockHeaderContainer `json:"data"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
    Data *SignedBeaconBlockHeaderContainer `json:"data"`
}

type GetValidatorsRequest struct {
    Ids []string `json:"ids"`
    Statuses []string `json:"statuses"`
}

type GetValidatorsResponse struct {
@@ -108,17 +106,6 @@ type ValidatorContainer struct {
    Validator *Validator `json:"validator"`
}

type Validator struct {
    Pubkey string `json:"pubkey"`
    WithdrawalCredentials string `json:"withdrawal_credentials"`
    EffectiveBalance string `json:"effective_balance"`
    Slashed bool `json:"slashed"`
    ActivationEligibilityEpoch string `json:"activation_eligibility_epoch"`
    ActivationEpoch string `json:"activation_epoch"`
    ExitEpoch string `json:"exit_epoch"`
    WithdrawableEpoch string `json:"withdrawable_epoch"`
}

type ValidatorBalance struct {
    Index string `json:"index"`
    Balance string `json:"balance"`
@@ -141,9 +128,9 @@ type SignedBlock struct {
}

type GetBlockAttestationsResponse struct {
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
    Data []*shared.Attestation `json:"data"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
    Data []*Attestation `json:"data"`
}

type GetStateRootResponse struct {
@@ -178,5 +165,34 @@ type SyncCommitteeValidators struct {
}

type BLSToExecutionChangesPoolResponse struct {
    Data []*shared.SignedBLSToExecutionChange `json:"data"`
    Data []*SignedBLSToExecutionChange `json:"data"`
}

type GetAttesterSlashingsResponse struct {
    Data []*AttesterSlashing `json:"data"`
}

type GetProposerSlashingsResponse struct {
    Data []*ProposerSlashing `json:"data"`
}

type GetWeakSubjectivityResponse struct {
    Data *WeakSubjectivityData `json:"data"`
}

type WeakSubjectivityData struct {
    WsCheckpoint *Checkpoint `json:"ws_checkpoint"`
    StateRoot string `json:"state_root"`
}

type GetDepositSnapshotResponse struct {
    Data *DepositSnapshot `json:"data"`
}

type DepositSnapshot struct {
    Finalized []string `json:"finalized"`
    DepositRoot string `json:"deposit_root"`
    DepositCount string `json:"deposit_count"`
    ExecutionBlockHash string `json:"execution_block_hash"`
    ExecutionBlockHeight string `json:"execution_block_height"`
}
api/server/structs/endpoints_blob.go (new file, 14 lines)
@@ -0,0 +1,14 @@
package structs

type SidecarsResponse struct {
    Data []*Sidecar `json:"data"`
}

type Sidecar struct {
    Index string `json:"index"`
    Blob string `json:"blob"`
    SignedBeaconBlockHeader *SignedBeaconBlockHeader `json:"signed_block_header"`
    KzgCommitment string `json:"kzg_commitment"`
    KzgProof string `json:"kzg_proof"`
    CommitmentInclusionProof []string `json:"kzg_commitment_inclusion_proof"`
}
@@ -1,4 +1,4 @@
package builder
package structs

type ExpectedWithdrawalsResponse struct {
    Data []*ExpectedWithdrawal `json:"data"`
api/server/structs/endpoints_config.go (new file, 18 lines)
@@ -0,0 +1,18 @@
package structs

type GetDepositContractResponse struct {
    Data *DepositContractData `json:"data"`
}

type DepositContractData struct {
    ChainId string `json:"chain_id"`
    Address string `json:"address"`
}

type GetForkScheduleResponse struct {
    Data []*Fork `json:"data"`
}

type GetSpecResponse struct {
    Data interface{} `json:"data"`
}
api/server/structs/endpoints_debug.go (new file, 57 lines)
@@ -0,0 +1,57 @@
package structs

import (
    "encoding/json"
)

type GetBeaconStateV2Response struct {
    Version string `json:"version"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    Finalized bool `json:"finalized"`
    Data json.RawMessage `json:"data"` // represents the state values based on the version
}

type GetForkChoiceHeadsV2Response struct {
    Data []*ForkChoiceHead `json:"data"`
}

type ForkChoiceHead struct {
    Root string `json:"root"`
    Slot string `json:"slot"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
}

type GetForkChoiceDumpResponse struct {
    JustifiedCheckpoint *Checkpoint `json:"justified_checkpoint"`
    FinalizedCheckpoint *Checkpoint `json:"finalized_checkpoint"`
    ForkChoiceNodes []*ForkChoiceNode `json:"fork_choice_nodes"`
    ExtraData *ForkChoiceDumpExtraData `json:"extra_data"`
}

type ForkChoiceDumpExtraData struct {
    UnrealizedJustifiedCheckpoint *Checkpoint `json:"unrealized_justified_checkpoint"`
    UnrealizedFinalizedCheckpoint *Checkpoint `json:"unrealized_finalized_checkpoint"`
    ProposerBoostRoot string `json:"proposer_boost_root"`
    PreviousProposerBoostRoot string `json:"previous_proposer_boost_root"`
    HeadRoot string `json:"head_root"`
}

type ForkChoiceNode struct {
    Slot string `json:"slot"`
    BlockRoot string `json:"block_root"`
    ParentRoot string `json:"parent_root"`
    JustifiedEpoch string `json:"justified_epoch"`
    FinalizedEpoch string `json:"finalized_epoch"`
    Weight string `json:"weight"`
    Validity string `json:"validity"`
    ExecutionBlockHash string `json:"execution_block_hash"`
    ExtraData *ForkChoiceNodeExtraData `json:"extra_data"`
}

type ForkChoiceNodeExtraData struct {
    UnrealizedJustifiedEpoch string `json:"unrealized_justified_epoch"`
    UnrealizedFinalizedEpoch string `json:"unrealized_finalized_epoch"`
    Balance string `json:"balance"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    TimeStamp string `json:"timestamp"`
}
api/server/structs/endpoints_events.go (new file, 116 lines)
@@ -0,0 +1,116 @@
package structs

import (
    "encoding/json"
)

type HeadEvent struct {
    Slot string `json:"slot"`
    Block string `json:"block"`
    State string `json:"state"`
    EpochTransition bool `json:"epoch_transition"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
    PreviousDutyDependentRoot string `json:"previous_duty_dependent_root"`
    CurrentDutyDependentRoot string `json:"current_duty_dependent_root"`
}

type BlockEvent struct {
    Slot string `json:"slot"`
    Block string `json:"block"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
}

type AggregatedAttEventSource struct {
    Aggregate *Attestation `json:"aggregate"`
}

type UnaggregatedAttEventSource struct {
    AggregationBits string `json:"aggregation_bits"`
    Data *AttestationData `json:"data"`
    Signature string `json:"signature"`
}

type FinalizedCheckpointEvent struct {
    Block string `json:"block"`
    State string `json:"state"`
    Epoch string `json:"epoch"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
}

type ChainReorgEvent struct {
    Slot string `json:"slot"`
    Depth string `json:"depth"`
    OldHeadBlock string `json:"old_head_block"`
    NewHeadBlock string `json:"new_head_block"`
    OldHeadState string `json:"old_head_state"`
    NewHeadState string `json:"new_head_state"`
    Epoch string `json:"epoch"`
    ExecutionOptimistic bool `json:"execution_optimistic"`
}

type PayloadAttributesEvent struct {
    Version string `json:"version"`
    Data json.RawMessage `json:"data"`
}

type PayloadAttributesEventData struct {
    ProposerIndex string `json:"proposer_index"`
    ProposalSlot string `json:"proposal_slot"`
    ParentBlockNumber string `json:"parent_block_number"`
    ParentBlockRoot string `json:"parent_block_root"`
    ParentBlockHash string `json:"parent_block_hash"`
    PayloadAttributes json.RawMessage `json:"payload_attributes"`
}

type PayloadAttributesV1 struct {
    Timestamp string `json:"timestamp"`
    PrevRandao string `json:"prev_randao"`
    SuggestedFeeRecipient string `json:"suggested_fee_recipient"`
}

type PayloadAttributesV2 struct {
    Timestamp string `json:"timestamp"`
    PrevRandao string `json:"prev_randao"`
    SuggestedFeeRecipient string `json:"suggested_fee_recipient"`
    Withdrawals []*Withdrawal `json:"withdrawals"`
}

type PayloadAttributesV3 struct {
    Timestamp string `json:"timestamp"`
    PrevRandao string `json:"prev_randao"`
    SuggestedFeeRecipient string `json:"suggested_fee_recipient"`
    Withdrawals []*Withdrawal `json:"withdrawals"`
    ParentBeaconBlockRoot string `json:"parent_beacon_block_root"`
}

type BlobSidecarEvent struct {
    BlockRoot string `json:"block_root"`
    Index string `json:"index"`
    Slot string `json:"slot"`
    KzgCommitment string `json:"kzg_commitment"`
    VersionedHash string `json:"versioned_hash"`
}

type LightClientFinalityUpdateEvent struct {
    Version string `json:"version"`
    Data *LightClientFinalityUpdate `json:"data"`
}

type LightClientFinalityUpdate struct {
    AttestedHeader *BeaconBlockHeader `json:"attested_header"`
    FinalizedHeader *BeaconBlockHeader `json:"finalized_header"`
    FinalityBranch []string `json:"finality_branch"`
    SyncAggregate *SyncAggregate `json:"sync_aggregate"`
    SignatureSlot string `json:"signature_slot"`
}

type LightClientOptimisticUpdateEvent struct {
    Version string `json:"version"`
    Data *LightClientOptimisticUpdate `json:"data"`
}

type LightClientOptimisticUpdate struct {
    AttestedHeader *BeaconBlockHeader `json:"attested_header"`
    SyncAggregate *SyncAggregate `json:"sync_aggregate"`
    SignatureSlot string `json:"signature_slot"`
}
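A minimal sketch (not part of the diff above) of how a consumer of the payload_attributes event stream might peel these layers apart, assuming it lives in the same package with "encoding/json" and "fmt" imported, and assuming the version strings follow the lowercase fork names so that "deneb" carries V3 attributes.

// Hypothetical helper for illustration only.
func decodeDenebPayloadAttributes(raw []byte) (*PayloadAttributesV3, error) {
    var ev PayloadAttributesEvent
    if err := json.Unmarshal(raw, &ev); err != nil {
        return nil, err
    }
    // Assumption: "deneb" is the variant whose payload_attributes decode into PayloadAttributesV3.
    if ev.Version != "deneb" {
        return nil, fmt.Errorf("unexpected version %q", ev.Version)
    }
    var data PayloadAttributesEventData
    if err := json.Unmarshal(ev.Data, &data); err != nil {
        return nil, err
    }
    attr := &PayloadAttributesV3{}
    if err := json.Unmarshal(data.PayloadAttributes, attr); err != nil {
        return nil, err
    }
    return attr, nil
}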
api/server/structs/endpoints_lightclient.go (new file, 31 lines)
@@ -0,0 +1,31 @@
package structs

type LightClientBootstrapResponse struct {
    Version string `json:"version"`
    Data *LightClientBootstrap `json:"data"`
}

type LightClientBootstrap struct {
    Header *BeaconBlockHeader `json:"header"`
    CurrentSyncCommittee *SyncCommittee `json:"current_sync_committee"`
    CurrentSyncCommitteeBranch []string `json:"current_sync_committee_branch"`
}

type LightClientUpdate struct {
    AttestedHeader *BeaconBlockHeader `json:"attested_header"`
    NextSyncCommittee *SyncCommittee `json:"next_sync_committee,omitempty"`
    FinalizedHeader *BeaconBlockHeader `json:"finalized_header,omitempty"`
    SyncAggregate *SyncAggregate `json:"sync_aggregate"`
    NextSyncCommitteeBranch []string `json:"next_sync_committee_branch,omitempty"`
    FinalityBranch []string `json:"finality_branch,omitempty"`
    SignatureSlot string `json:"signature_slot"`
}

type LightClientUpdateWithVersion struct {
    Version string `json:"version"`
    Data *LightClientUpdate `json:"data"`
}

type LightClientUpdatesByRangeResponse struct {
    Updates []*LightClientUpdateWithVersion `json:"updates"`
}
@@ -1,4 +1,4 @@
package node
package structs

type SyncStatusResponse struct {
    Data *SyncStatusResponseData `json:"data"`
@@ -63,3 +63,11 @@ type GetVersionResponse struct {
type Version struct {
    Version string `json:"version"`
}

type AddrRequest struct {
    Addr string `json:"addr"`
}

type PeersResponse struct {
    Peers []*Peer `json:"peers"`
}
@@ -1,4 +1,4 @@
package rewards
package structs

type BlockRewardsResponse struct {
    Data *BlockRewards `json:"data"`
@@ -31,6 +31,7 @@ type IdealAttestationReward struct {
    Head string `json:"head"`
    Target string `json:"target"`
    Source string `json:"source"`
    Inactivity string `json:"inactivity"`
}

type TotalAttestationReward struct {
@@ -38,7 +39,7 @@ type TotalAttestationReward struct {
    Head string `json:"head"`
    Target string `json:"target"`
    Source string `json:"source"`
    InclusionDelay string `json:"inclusion_delay"`
    Inactivity string `json:"inactivity"`
}

type SyncCommitteeRewardsResponse struct {
@@ -1,37 +1,37 @@
package validator
package structs

import (
    "encoding/json"

    "github.com/prysmaticlabs/prysm/v4/beacon-chain/rpc/eth/shared"
    "github.com/prysmaticlabs/prysm/v5/consensus-types/primitives"
)

type AggregateAttestationResponse struct {
    Data *shared.Attestation `json:"data"`
    Data *Attestation `json:"data"`
}

type SubmitContributionAndProofsRequest struct {
    Data []*shared.SignedContributionAndProof `json:"data"`
    Data []*SignedContributionAndProof `json:"data"`
}

type SubmitAggregateAndProofsRequest struct {
    Data []*shared.SignedAggregateAttestationAndProof `json:"data"`
    Data []*SignedAggregateAttestationAndProof `json:"data"`
}

type SubmitSyncCommitteeSubscriptionsRequest struct {
    Data []*shared.SyncCommitteeSubscription `json:"data"`
    Data []*SyncCommitteeSubscription `json:"data"`
}

type SubmitBeaconCommitteeSubscriptionsRequest struct {
    Data []*shared.BeaconCommitteeSubscription `json:"data"`
    Data []*BeaconCommitteeSubscription `json:"data"`
}

type GetAttestationDataResponse struct {
    Data *shared.AttestationData `json:"data"`
    Data *AttestationData `json:"data"`
}

type ProduceSyncCommitteeContributionResponse struct {
    Data *shared.SyncCommitteeContribution `json:"data"`
    Data *SyncCommitteeContribution `json:"data"`
}

type GetAttesterDutiesResponse struct {
@@ -90,3 +90,31 @@ type Liveness struct {
    Index string `json:"index"`
    IsLive bool `json:"is_live"`
}

type GetValidatorCountResponse struct {
    ExecutionOptimistic string `json:"execution_optimistic"`
    Finalized string `json:"finalized"`
    Data []*ValidatorCount `json:"data"`
}

type ValidatorCount struct {
    Status string `json:"status"`
    Count string `json:"count"`
}

type GetValidatorPerformanceRequest struct {
    PublicKeys [][]byte `json:"public_keys,omitempty"`
    Indices []primitives.ValidatorIndex `json:"indices,omitempty"`
}

type GetValidatorPerformanceResponse struct {
    PublicKeys [][]byte `json:"public_keys,omitempty"`
    CorrectlyVotedSource []bool `json:"correctly_voted_source,omitempty"`
    CorrectlyVotedTarget []bool `json:"correctly_voted_target,omitempty"`
    CorrectlyVotedHead []bool `json:"correctly_voted_head,omitempty"`
    CurrentEffectiveBalances []uint64 `json:"current_effective_balances,omitempty"`
    BalancesBeforeEpochTransition []uint64 `json:"balances_before_epoch_transition,omitempty"`
    BalancesAfterEpochTransition []uint64 `json:"balances_after_epoch_transition,omitempty"`
    MissingValidators [][]byte `json:"missing_validators,omitempty"`
    InactivityScores []uint64 `json:"inactivity_scores,omitempty"`
}
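A minimal sketch (not part of the diff, assuming the same package with "encoding/json" imported) of building the request body above; note that because PublicKeys is [][]byte, encoding/json would render any keys as base64 strings rather than 0x-hex.

// Hypothetical helper for illustration only.
func encodePerformanceRequest(indices []primitives.ValidatorIndex) ([]byte, error) {
    req := &GetValidatorPerformanceRequest{
        Indices: indices, // e.g. []primitives.ValidatorIndex{10, 11}
    }
    return json.Marshal(req)
}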
api/server/structs/other.go (new file, 209 lines)
@@ -0,0 +1,209 @@
package structs

type Validator struct {
    Pubkey string `json:"pubkey"`
    WithdrawalCredentials string `json:"withdrawal_credentials"`
    EffectiveBalance string `json:"effective_balance"`
    Slashed bool `json:"slashed"`
    ActivationEligibilityEpoch string `json:"activation_eligibility_epoch"`
    ActivationEpoch string `json:"activation_epoch"`
    ExitEpoch string `json:"exit_epoch"`
    WithdrawableEpoch string `json:"withdrawable_epoch"`
}

type PendingAttestation struct {
    AggregationBits string `json:"aggregation_bits"`
    Data *AttestationData `json:"data"`
    InclusionDelay string `json:"inclusion_delay"`
    ProposerIndex string `json:"proposer_index"`
}

type HistoricalSummary struct {
    BlockSummaryRoot string `json:"block_summary_root"`
    StateSummaryRoot string `json:"state_summary_root"`
}

type Attestation struct {
    AggregationBits string `json:"aggregation_bits"`
    Data *AttestationData `json:"data"`
    Signature string `json:"signature"`
}

type AttestationData struct {
    Slot string `json:"slot"`
    CommitteeIndex string `json:"index"`
    BeaconBlockRoot string `json:"beacon_block_root"`
    Source *Checkpoint `json:"source"`
    Target *Checkpoint `json:"target"`
}

type Checkpoint struct {
    Epoch string `json:"epoch"`
    Root string `json:"root"`
}

type Committee struct {
    Index string `json:"index"`
    Slot string `json:"slot"`
    Validators []string `json:"validators"`
}

type SignedContributionAndProof struct {
    Message *ContributionAndProof `json:"message"`
    Signature string `json:"signature"`
}

type ContributionAndProof struct {
    AggregatorIndex string `json:"aggregator_index"`
    Contribution *SyncCommitteeContribution `json:"contribution"`
    SelectionProof string `json:"selection_proof"`
}

type SyncCommitteeContribution struct {
    Slot string `json:"slot"`
    BeaconBlockRoot string `json:"beacon_block_root"`
    SubcommitteeIndex string `json:"subcommittee_index"`
    AggregationBits string `json:"aggregation_bits"`
    Signature string `json:"signature"`
}

type SignedAggregateAttestationAndProof struct {
    Message *AggregateAttestationAndProof `json:"message"`
    Signature string `json:"signature"`
}

type AggregateAttestationAndProof struct {
    AggregatorIndex string `json:"aggregator_index"`
    Aggregate *Attestation `json:"aggregate"`
    SelectionProof string `json:"selection_proof"`
}

type SyncCommitteeSubscription struct {
    ValidatorIndex string `json:"validator_index"`
    SyncCommitteeIndices []string `json:"sync_committee_indices"`
    UntilEpoch string `json:"until_epoch"`
}

type BeaconCommitteeSubscription struct {
    ValidatorIndex string `json:"validator_index"`
    CommitteeIndex string `json:"committee_index"`
    CommitteesAtSlot string `json:"committees_at_slot"`
    Slot string `json:"slot"`
    IsAggregator bool `json:"is_aggregator"`
}

type ValidatorRegistration struct {
    FeeRecipient string `json:"fee_recipient"`
    GasLimit string `json:"gas_limit"`
    Timestamp string `json:"timestamp"`
    Pubkey string `json:"pubkey"`
}

type SignedValidatorRegistration struct {
    Message *ValidatorRegistration `json:"message"`
    Signature string `json:"signature"`
}

type FeeRecipient struct {
    ValidatorIndex string `json:"validator_index"`
    FeeRecipient string `json:"fee_recipient"`
}

type SignedVoluntaryExit struct {
    Message *VoluntaryExit `json:"message"`
    Signature string `json:"signature"`
}

type VoluntaryExit struct {
    Epoch string `json:"epoch"`
    ValidatorIndex string `json:"validator_index"`
}

type Fork struct {
    PreviousVersion string `json:"previous_version"`
    CurrentVersion string `json:"current_version"`
    Epoch string `json:"epoch"`
}

type SignedBLSToExecutionChange struct {
    Message *BLSToExecutionChange `json:"message"`
    Signature string `json:"signature"`
}

type BLSToExecutionChange struct {
    ValidatorIndex string `json:"validator_index"`
    FromBLSPubkey string `json:"from_bls_pubkey"`
    ToExecutionAddress string `json:"to_execution_address"`
}

type SyncCommitteeMessage struct {
    Slot string `json:"slot"`
    BeaconBlockRoot string `json:"beacon_block_root"`
    ValidatorIndex string `json:"validator_index"`
    Signature string `json:"signature"`
}

type SyncCommittee struct {
    Pubkeys []string `json:"pubkeys"`
    AggregatePubkey string `json:"aggregate_pubkey"`
}

// SyncDetails contains information about node sync status.
type SyncDetails struct {
    HeadSlot string `json:"head_slot"`
    SyncDistance string `json:"sync_distance"`
    IsSyncing bool `json:"is_syncing"`
    IsOptimistic bool `json:"is_optimistic"`
    ElOffline bool `json:"el_offline"`
}

// SyncDetailsContainer is a wrapper for Data.
type SyncDetailsContainer struct {
    Data *SyncDetails `json:"data"`
}

type Eth1Data struct {
    DepositRoot string `json:"deposit_root"`
    DepositCount string `json:"deposit_count"`
    BlockHash string `json:"block_hash"`
}

type ProposerSlashing struct {
    SignedHeader1 *SignedBeaconBlockHeader `json:"signed_header_1"`
    SignedHeader2 *SignedBeaconBlockHeader `json:"signed_header_2"`
}

type AttesterSlashing struct {
    Attestation1 *IndexedAttestation `json:"attestation_1"`
    Attestation2 *IndexedAttestation `json:"attestation_2"`
}

type Deposit struct {
    Proof []string `json:"proof"`
    Data *DepositData `json:"data"`
}

type DepositData struct {
    Pubkey string `json:"pubkey"`
    WithdrawalCredentials string `json:"withdrawal_credentials"`
    Amount string `json:"amount"`
    Signature string `json:"signature"`
}

type IndexedAttestation struct {
    AttestingIndices []string `json:"attesting_indices"`
    Data *AttestationData `json:"data"`
    Signature string `json:"signature"`
}

type SyncAggregate struct {
    SyncCommitteeBits string `json:"sync_committee_bits"`
    SyncCommitteeSignature string `json:"sync_committee_signature"`
}

type Withdrawal struct {
    WithdrawalIndex string `json:"index"`
    ValidatorIndex string `json:"validator_index"`
    ExecutionAddress string `json:"address"`
    Amount string `json:"amount"`
}
api/server/structs/state.go (new file, 142 lines)
@@ -0,0 +1,142 @@
package structs

type BeaconState struct {
    GenesisTime string `json:"genesis_time"`
    GenesisValidatorsRoot string `json:"genesis_validators_root"`
    Slot string `json:"slot"`
    Fork *Fork `json:"fork"`
    LatestBlockHeader *BeaconBlockHeader `json:"latest_block_header"`
    BlockRoots []string `json:"block_roots"`
    StateRoots []string `json:"state_roots"`
    HistoricalRoots []string `json:"historical_roots"`
    Eth1Data *Eth1Data `json:"eth1_data"`
    Eth1DataVotes []*Eth1Data `json:"eth1_data_votes"`
    Eth1DepositIndex string `json:"eth1_deposit_index"`
    Validators []*Validator `json:"validators"`
    Balances []string `json:"balances"`
    RandaoMixes []string `json:"randao_mixes"`
    Slashings []string `json:"slashings"`
    PreviousEpochAttestations []*PendingAttestation `json:"previous_epoch_attestations"`
    CurrentEpochAttestations []*PendingAttestation `json:"current_epoch_attestations"`
    JustificationBits string `json:"justification_bits"`
    PreviousJustifiedCheckpoint *Checkpoint `json:"previous_justified_checkpoint"`
    CurrentJustifiedCheckpoint *Checkpoint `json:"current_justified_checkpoint"`
    FinalizedCheckpoint *Checkpoint `json:"finalized_checkpoint"`
}

type BeaconStateAltair struct {
    GenesisTime string `json:"genesis_time"`
    GenesisValidatorsRoot string `json:"genesis_validators_root"`
    Slot string `json:"slot"`
    Fork *Fork `json:"fork"`
    LatestBlockHeader *BeaconBlockHeader `json:"latest_block_header"`
    BlockRoots []string `json:"block_roots"`
    StateRoots []string `json:"state_roots"`
    HistoricalRoots []string `json:"historical_roots"`
    Eth1Data *Eth1Data `json:"eth1_data"`
    Eth1DataVotes []*Eth1Data `json:"eth1_data_votes"`
    Eth1DepositIndex string `json:"eth1_deposit_index"`
    Validators []*Validator `json:"validators"`
    Balances []string `json:"balances"`
    RandaoMixes []string `json:"randao_mixes"`
    Slashings []string `json:"slashings"`
    PreviousEpochParticipation []string `json:"previous_epoch_participation"`
    CurrentEpochParticipation []string `json:"current_epoch_participation"`
    JustificationBits string `json:"justification_bits"`
    PreviousJustifiedCheckpoint *Checkpoint `json:"previous_justified_checkpoint"`
    CurrentJustifiedCheckpoint *Checkpoint `json:"current_justified_checkpoint"`
    FinalizedCheckpoint *Checkpoint `json:"finalized_checkpoint"`
    InactivityScores []string `json:"inactivity_scores"`
    CurrentSyncCommittee *SyncCommittee `json:"current_sync_committee"`
    NextSyncCommittee *SyncCommittee `json:"next_sync_committee"`
}

type BeaconStateBellatrix struct {
    GenesisTime string `json:"genesis_time"`
    GenesisValidatorsRoot string `json:"genesis_validators_root"`
    Slot string `json:"slot"`
    Fork *Fork `json:"fork"`
    LatestBlockHeader *BeaconBlockHeader `json:"latest_block_header"`
    BlockRoots []string `json:"block_roots"`
    StateRoots []string `json:"state_roots"`
    HistoricalRoots []string `json:"historical_roots"`
    Eth1Data *Eth1Data `json:"eth1_data"`
    Eth1DataVotes []*Eth1Data `json:"eth1_data_votes"`
    Eth1DepositIndex string `json:"eth1_deposit_index"`
    Validators []*Validator `json:"validators"`
    Balances []string `json:"balances"`
    RandaoMixes []string `json:"randao_mixes"`
    Slashings []string `json:"slashings"`
    PreviousEpochParticipation []string `json:"previous_epoch_participation"`
    CurrentEpochParticipation []string `json:"current_epoch_participation"`
    JustificationBits string `json:"justification_bits"`
    PreviousJustifiedCheckpoint *Checkpoint `json:"previous_justified_checkpoint"`
    CurrentJustifiedCheckpoint *Checkpoint `json:"current_justified_checkpoint"`
    FinalizedCheckpoint *Checkpoint `json:"finalized_checkpoint"`
    InactivityScores []string `json:"inactivity_scores"`
    CurrentSyncCommittee *SyncCommittee `json:"current_sync_committee"`
    NextSyncCommittee *SyncCommittee `json:"next_sync_committee"`
    LatestExecutionPayloadHeader *ExecutionPayloadHeader `json:"latest_execution_payload_header"`
}

type BeaconStateCapella struct {
    GenesisTime string `json:"genesis_time"`
    GenesisValidatorsRoot string `json:"genesis_validators_root"`
    Slot string `json:"slot"`
    Fork *Fork `json:"fork"`
    LatestBlockHeader *BeaconBlockHeader `json:"latest_block_header"`
    BlockRoots []string `json:"block_roots"`
    StateRoots []string `json:"state_roots"`
    HistoricalRoots []string `json:"historical_roots"`
    Eth1Data *Eth1Data `json:"eth1_data"`
    Eth1DataVotes []*Eth1Data `json:"eth1_data_votes"`
    Eth1DepositIndex string `json:"eth1_deposit_index"`
    Validators []*Validator `json:"validators"`
    Balances []string `json:"balances"`
    RandaoMixes []string `json:"randao_mixes"`
    Slashings []string `json:"slashings"`
    PreviousEpochParticipation []string `json:"previous_epoch_participation"`
    CurrentEpochParticipation []string `json:"current_epoch_participation"`
    JustificationBits string `json:"justification_bits"`
    PreviousJustifiedCheckpoint *Checkpoint `json:"previous_justified_checkpoint"`
    CurrentJustifiedCheckpoint *Checkpoint `json:"current_justified_checkpoint"`
    FinalizedCheckpoint *Checkpoint `json:"finalized_checkpoint"`
    InactivityScores []string `json:"inactivity_scores"`
    CurrentSyncCommittee *SyncCommittee `json:"current_sync_committee"`
    NextSyncCommittee *SyncCommittee `json:"next_sync_committee"`
    LatestExecutionPayloadHeader *ExecutionPayloadHeaderCapella `json:"latest_execution_payload_header"`
    NextWithdrawalIndex string `json:"next_withdrawal_index"`
    NextWithdrawalValidatorIndex string `json:"next_withdrawal_validator_index"`
    HistoricalSummaries []*HistoricalSummary `json:"historical_summaries"`
}

type BeaconStateDeneb struct {
    GenesisTime string `json:"genesis_time"`
    GenesisValidatorsRoot string `json:"genesis_validators_root"`
    Slot string `json:"slot"`
    Fork *Fork `json:"fork"`
    LatestBlockHeader *BeaconBlockHeader `json:"latest_block_header"`
    BlockRoots []string `json:"block_roots"`
    StateRoots []string `json:"state_roots"`
    HistoricalRoots []string `json:"historical_roots"`
    Eth1Data *Eth1Data `json:"eth1_data"`
    Eth1DataVotes []*Eth1Data `json:"eth1_data_votes"`
    Eth1DepositIndex string `json:"eth1_deposit_index"`
    Validators []*Validator `json:"validators"`
    Balances []string `json:"balances"`
    RandaoMixes []string `json:"randao_mixes"`
    Slashings []string `json:"slashings"`
    PreviousEpochParticipation []string `json:"previous_epoch_participation"`
    CurrentEpochParticipation []string `json:"current_epoch_participation"`
    JustificationBits string `json:"justification_bits"`
    PreviousJustifiedCheckpoint *Checkpoint `json:"previous_justified_checkpoint"`
    CurrentJustifiedCheckpoint *Checkpoint `json:"current_justified_checkpoint"`
    FinalizedCheckpoint *Checkpoint `json:"finalized_checkpoint"`
    InactivityScores []string `json:"inactivity_scores"`
    CurrentSyncCommittee *SyncCommittee `json:"current_sync_committee"`
    NextSyncCommittee *SyncCommittee `json:"next_sync_committee"`
    LatestExecutionPayloadHeader *ExecutionPayloadHeaderDeneb `json:"latest_execution_payload_header"`
    NextWithdrawalIndex string `json:"next_withdrawal_index"`
    NextWithdrawalValidatorIndex string `json:"next_withdrawal_validator_index"`
    HistoricalSummaries []*HistoricalSummary `json:"historical_summaries"`
}
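A minimal sketch (not part of the diff above) tying these state structs back to GetBeaconStateV2Response from endpoints_debug.go: pick the concrete struct from the version tag before decoding the raw payload. It assumes the same package, "encoding/json" and "fmt" imports, and that the version strings are the lowercase fork names used by the beacon API.

// Hypothetical helper for illustration only.
func decodeStateResponse(raw []byte) (interface{}, error) {
    var resp GetBeaconStateV2Response
    if err := json.Unmarshal(raw, &resp); err != nil {
        return nil, err
    }
    switch resp.Version {
    case "deneb":
        st := &BeaconStateDeneb{}
        return st, json.Unmarshal(resp.Data, st)
    case "capella":
        st := &BeaconStateCapella{}
        return st, json.Unmarshal(resp.Data, st)
    default:
        return nil, fmt.Errorf("unsupported version %q", resp.Version)
    }
}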
@@ -3,8 +3,8 @@ package server
import (
    "testing"

    "github.com/prysmaticlabs/prysm/v4/testing/assert"
    "github.com/prysmaticlabs/prysm/v4/testing/require"
    "github.com/prysmaticlabs/prysm/v5/testing/assert"
    "github.com/prysmaticlabs/prysm/v5/testing/require"
)

func TestNormalizeQueryValues(t *testing.T) {

@@ -8,7 +8,7 @@ go_library(
        "multilock.go",
        "scatter.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/v4/async",
    importpath = "github.com/prysmaticlabs/prysm/v5/async",
    visibility = ["//visibility:public"],
    deps = ["@com_github_sirupsen_logrus//:go_default_library"],
)

@@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test")
go_library(
    name = "go_default_library",
    srcs = ["abool.go"],
    importpath = "github.com/prysmaticlabs/prysm/v4/async/abool",
    importpath = "github.com/prysmaticlabs/prysm/v5/async/abool",
    visibility = ["//visibility:public"],
)

@@ -6,8 +6,8 @@ import (
    "sync"
    "testing"

    "github.com/prysmaticlabs/prysm/v4/async"
    "github.com/prysmaticlabs/prysm/v4/testing/require"
    "github.com/prysmaticlabs/prysm/v5/async"
    "github.com/prysmaticlabs/prysm/v5/testing/require"
    log "github.com/sirupsen/logrus"
)

@@ -7,10 +7,10 @@ import (
    "testing"
    "time"

    "github.com/prysmaticlabs/prysm/v4/async"
    "github.com/prysmaticlabs/prysm/v4/testing/assert"
    "github.com/prysmaticlabs/prysm/v4/testing/require"
    "github.com/prysmaticlabs/prysm/v4/testing/util"
    "github.com/prysmaticlabs/prysm/v5/async"
    "github.com/prysmaticlabs/prysm/v5/testing/assert"
    "github.com/prysmaticlabs/prysm/v5/testing/require"
    "github.com/prysmaticlabs/prysm/v5/testing/util"
)

func TestDebounce_NoEvents(t *testing.T) {

@@ -6,7 +6,7 @@ go_library(
        "feed.go",
        "subscription.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/v4/async/event",
    importpath = "github.com/prysmaticlabs/prysm/v5/async/event",
    visibility = ["//visibility:public"],
    deps = ["//time/mclock:go_default_library"],
)

@@ -19,7 +19,7 @@ package event_test
import (
    "fmt"

    "github.com/prysmaticlabs/prysm/v4/async/event"
    "github.com/prysmaticlabs/prysm/v5/async/event"
)

func ExampleFeed_acknowledgedEvents() {

@@ -20,7 +20,7 @@ import (
    "fmt"
    "sync"

    "github.com/prysmaticlabs/prysm/v4/async/event"
    "github.com/prysmaticlabs/prysm/v5/async/event"
)

// This example demonstrates how SubscriptionScope can be used to control the lifetime of

@@ -19,7 +19,7 @@ package event_test
import (
    "fmt"

    "github.com/prysmaticlabs/prysm/v4/async/event"
    "github.com/prysmaticlabs/prysm/v5/async/event"
)

func ExampleNewSubscription() {

@@ -23,7 +23,7 @@ import (
    "testing"
    "time"

    "github.com/prysmaticlabs/prysm/v4/testing/assert"
    "github.com/prysmaticlabs/prysm/v5/testing/assert"
)

func TestFeedPanics(t *testing.T) {

@@ -21,7 +21,7 @@ import (
    "sync"
    "time"

    "github.com/prysmaticlabs/prysm/v4/time/mclock"
    "github.com/prysmaticlabs/prysm/v5/time/mclock"
)

// waitQuotient is divided against the max backoff time, in order to have N requests based on the full

@@ -23,7 +23,7 @@ import (
    "testing"
    "time"

    "github.com/prysmaticlabs/prysm/v4/testing/require"
    "github.com/prysmaticlabs/prysm/v5/testing/require"
)

var errInts = errors.New("error in subscribeInts")
Some files were not shown because too many files have changed in this diff.