Add golang.org/x/tools modernize static analyzer and fix violations (#15946)

* Ran gopls modernize to fix everything

go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...

* Override rules_go provided dependency for golang.org/x/tools to v0.38.0.

To update this, checked out rules_go, then ran `bazel run //go/tools/releaser -- upgrade-dep -mirror=false org_golang_x_tools` and copied the patches.

* Fix buildtag violations and ignore buildtag violations in external

* Introduce modernize analyzer package.

* Add modernize "any" analyzer.

* Fix violations of the "any" analyzer

* Add modernize "appendclipped" analyzer.

* Fix violations of appendclipped

* Add modernize "bloop" analyzer.

* Add modernize "fmtappendf" analyzer.

* Add modernize "forvar" analyzer.

* Add modernize "mapsloop" analyzer.

* Add modernize "minmax" analyzer.

* Fix violations of minmax analyzer

* Add modernize "omitzero" analyzer.

* Add modernize "rangeint" analyzer.

* Fix violations of rangeint.

* Add modernize "reflecttypefor" analyzer.

* Fix violations of reflecttypefor analyzer.

* Add modernize "slicescontains" analyzer.

* Add modernize "slicessort" analyzer.

* Add modernize "slicesdelete" analyzer. This is disabled by default for now. See https://go.dev/issue/73686.

* Add modernize "stringscutprefix" analyzer.

* Add modernize "stringsbuilder" analyzer.

* Fix violations of stringsbuilder analyzer.

* Add modernize "stringsseq" analyzer.

* Add modernize "testingcontext" analyzer.

* Add modernize "waitgroup" analyzer.

* Changelog fragment

* gofmt

* gazelle

* Add modernize "newexpr" analyzer.

* Disable newexpr until go1.26

* Add more details in WORKSPACE on how to update the override

* @nalepae feedback on min()

* gofmt

* Fix violations of forvar
This commit is contained in:
Preston Van Loon
2025-11-13 19:27:22 -06:00
committed by GitHub
parent f77b78943a
commit 2fd6bd8150
605 changed files with 217475 additions and 2228 deletions

View File

@@ -20,7 +20,7 @@ func TestDelete(t *testing.T) {
numAccounts := 5
keystores := make([]*keymanager.Keystore, numAccounts)
passwords := make([]string, numAccounts)
for i := 0; i < numAccounts; i++ {
for i := range numAccounts {
keystores[i] = createRandomKeystore(t, password)
passwords[i] = password
}

View File

@@ -156,7 +156,7 @@ func displayExitInfo(rawExitedKeys [][]byte, trimmedExitedKeys []string) {
urlFormattedPubKeys[i] = formatBeaconChaURL(key)
}
ifaceKeys := make([]interface{}, len(urlFormattedPubKeys))
ifaceKeys := make([]any, len(urlFormattedPubKeys))
for i, k := range urlFormattedPubKeys {
ifaceKeys[i] = k
}

View File

@@ -65,7 +65,7 @@ func selectAccounts(selectionPrompt string, pubKeys [][fieldparams.BLSPubkeyLeng
}
if result == allAccountsText {
fmt.Printf("%s\n", au.BrightRed("[Selected all accounts]").Bold())
for i := 0; i < len(pubKeys); i++ {
for i := range pubKeys {
results = append(results, i)
}
break

View File

@@ -154,7 +154,7 @@ func TestListAccounts_LocalKeymanager(t *testing.T) {
numAccounts := 5
keystores := make([]*keymanager.Keystore, numAccounts)
passwords := make([]string, numAccounts)
for i := 0; i < numAccounts; i++ {
for i := range numAccounts {
keystores[i] = createRandomKeystore(t, password)
passwords[i] = password
}

View File

@@ -590,7 +590,7 @@ func TestAttestToBlockHead_DoesAttestAfterDelay(t *testing.T) {
BeaconBlockRoot: bytesutil.PadTo([]byte("A"), 32),
Target: &ethpb.Checkpoint{Root: bytesutil.PadTo([]byte("B"), 32)},
Source: &ethpb.Checkpoint{Root: bytesutil.PadTo([]byte("C"), 32), Epoch: 3},
}, nil).Do(func(arg0, arg1 interface{}) {
}, nil).Do(func(arg0, arg1 any) {
wg.Done()
})
@@ -745,12 +745,10 @@ func TestServer_WaitToSlotOneThird_ReceiveBlockSlot(t *testing.T) {
}
wg := &sync.WaitGroup{}
wg.Add(1)
go func() {
wg.Go(func() {
time.Sleep(100 * time.Millisecond)
v.slotFeed.Send(currentSlot)
wg.Done()
}()
})
v.waitOneThirdOrValidBlock(t.Context(), currentSlot)

View File

@@ -220,15 +220,9 @@ func (c beaconApiChainClient) Validators(ctx context.Context, in *ethpb.ListVali
return nil, errors.New("state validators data is nil")
}
start := pageToken * uint64(pageSize)
if start > uint64(len(stateValidators.Data)) {
start = uint64(len(stateValidators.Data))
}
start := min(pageToken*uint64(pageSize), uint64(len(stateValidators.Data)))
end := start + uint64(pageSize)
if end > uint64(len(stateValidators.Data)) {
end = uint64(len(stateValidators.Data))
}
end := min(start+uint64(pageSize), uint64(len(stateValidators.Data)))
validators := make([]*ethpb.Validators_ValidatorContainer, end-start)
for idx := start; idx < end; idx++ {

View File

@@ -390,7 +390,7 @@ func TestListValidators(t *testing.T) {
// Generate more than 250 validators, but expect only 250 to be returned
validators := make([]*structs.ValidatorContainer, 267)
for idx := 0; idx < len(validators); idx++ {
for idx := range validators {
validators[idx] = validValidatorsResponse.Data[0]
}
@@ -402,7 +402,7 @@ func TestListValidators(t *testing.T) {
},
generateProtoValidatorsResponse: func() *ethpb.Validators {
validators := make([]*ethpb.Validators_ValidatorContainer, 250)
for idx := 0; idx < len(validators); idx++ {
for idx := range validators {
validators[idx] = &ethpb.Validators_ValidatorContainer{
Index: 1,
Validator: &ethpb.Validator{

View File

@@ -656,7 +656,7 @@ func TestGetDutiesForEpoch_Error(t *testing.T) {
).AnyTimes()
vals := make([]validatorForDuty, len(pubkeys))
for i := 0; i < len(pubkeys); i++ {
for i := range pubkeys {
vals[i] = validatorForDuty{
pubkey: pubkeys[i],
index: validatorIndices[i],
@@ -883,7 +883,7 @@ func TestGetDutiesForEpoch_Valid(t *testing.T) {
validatorClient := &beaconApiValidatorClient{dutiesProvider: dutiesProvider}
vals := make([]validatorForDuty, len(pubkeys))
for i := 0; i < len(pubkeys); i++ {
for i := range pubkeys {
vals[i] = validatorForDuty{
pubkey: pubkeys[i],
index: validatorIndices[i],
@@ -933,7 +933,7 @@ func TestGetDuties_Valid(t *testing.T) {
pubkeys := make([][]byte, valCount)
validatorIndices := make([]primitives.ValidatorIndex, valCount)
vals := make([]validatorForDuty, valCount)
for i := 0; i < valCount; i++ {
for i := range valCount {
pubkeys[i] = []byte(strconv.Itoa(i))
validatorIndices[i] = primitives.ValidatorIndex(i)
vals[i] = validatorForDuty{
@@ -1399,7 +1399,7 @@ func generateValidSyncDuties(pubkeys [][]byte, validatorIndices []primitives.Val
// We will use a reverse function to easily make sure that the current epoch and next epoch data returned by dutiesForEpoch
// are not the same
func reverseSlice[T interface{}](slice []T) []T {
func reverseSlice[T any](slice []T) []T {
reversedSlice := make([]T, len(slice))
for i := range slice {
reversedSlice[len(reversedSlice)-1-i] = slice[i]

View File

@@ -43,7 +43,7 @@ func TestGetBeaconBlock_RequestFailed(t *testing.T) {
func TestGetBeaconBlock_Error(t *testing.T) {
testCases := []struct {
name string
beaconBlock interface{}
beaconBlock any
expectedErrorMessage string
consensusVersion string
blinded bool

View File

@@ -22,9 +22,9 @@ import (
type reqOption func(*http.Request)
type RestHandler interface {
Get(ctx context.Context, endpoint string, resp interface{}) error
Get(ctx context.Context, endpoint string, resp any) error
GetSSZ(ctx context.Context, endpoint string) ([]byte, http.Header, error)
Post(ctx context.Context, endpoint string, headers map[string]string, data *bytes.Buffer, resp interface{}) error
Post(ctx context.Context, endpoint string, headers map[string]string, data *bytes.Buffer, resp any) error
PostSSZ(ctx context.Context, endpoint string, headers map[string]string, data *bytes.Buffer) ([]byte, http.Header, error)
HttpClient() *http.Client
Host() string
@@ -70,7 +70,7 @@ func (c *BeaconApiRestHandler) Host() string {
// Get sends a GET request and decodes the response body as a JSON object into the passed in object.
// If an HTTP error is returned, the body is decoded as a DefaultJsonError JSON object and returned as the first return value.
func (c *BeaconApiRestHandler) Get(ctx context.Context, endpoint string, resp interface{}) error {
func (c *BeaconApiRestHandler) Get(ctx context.Context, endpoint string, resp any) error {
url := c.host + endpoint
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
@@ -150,7 +150,7 @@ func (c *BeaconApiRestHandler) Post(
apiEndpoint string,
headers map[string]string,
data *bytes.Buffer,
resp interface{},
resp any,
) error {
if data == nil {
return errors.New("data is nil")
@@ -249,7 +249,7 @@ func (c *BeaconApiRestHandler) PostSSZ(
return body, httpResp.Header, nil
}
func decodeResp(httpResp *http.Response, resp interface{}) error {
func decodeResp(httpResp *http.Response, resp any) error {
body, err := io.ReadAll(httpResp.Body)
if err != nil {
return errors.Wrapf(err, "failed to read response body for %s", httpResp.Request.URL)

View File

@@ -36,12 +36,10 @@ func TestHealthMonitor_IsHealthy_Concurrency(t *testing.T) {
var wg sync.WaitGroup
numGoroutines := 10
for i := 0; i < numGoroutines; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for range numGoroutines {
wg.Go(func() {
assert.True(t, monitor.IsHealthy())
}()
})
}
wg.Wait()
@@ -50,12 +48,10 @@ func TestHealthMonitor_IsHealthy_Concurrency(t *testing.T) {
monitor.isHealthy = false
monitor.Unlock()
for i := 0; i < numGoroutines; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for range numGoroutines {
wg.Go(func() {
assert.False(t, monitor.IsHealthy())
}()
})
}
wg.Wait()
}

View File

@@ -170,7 +170,7 @@ func TestAttests_NextSlot(t *testing.T) {
ctrl := gomock.NewController(t)
defer ctrl.Finish()
attSubmitted := make(chan interface{})
attSubmitted := make(chan any)
v := &testutil.FakeValidator{Km: &mockKeymanager{accountsChangedFeed: &event.Feed{}}, AttSubmitted: attSubmitted}
ctx, cancel := context.WithCancel(t.Context())
@@ -192,7 +192,7 @@ func TestAttests_NextSlot(t *testing.T) {
func TestProposes_NextSlot(t *testing.T) {
ctrl := gomock.NewController(t)
defer ctrl.Finish()
blockProposed := make(chan interface{})
blockProposed := make(chan any)
v := &testutil.FakeValidator{Km: &mockKeymanager{accountsChangedFeed: &event.Feed{}}, BlockProposed: blockProposed}
ctx, cancel := context.WithCancel(t.Context())
@@ -216,8 +216,8 @@ func TestBothProposesAndAttests_NextSlot(t *testing.T) {
ctrl := gomock.NewController(t)
defer ctrl.Finish()
blockProposed := make(chan interface{})
attSubmitted := make(chan interface{})
blockProposed := make(chan any)
attSubmitted := make(chan any)
v := &testutil.FakeValidator{Km: &mockKeymanager{accountsChangedFeed: &event.Feed{}}, BlockProposed: blockProposed, AttSubmitted: attSubmitted}
ctx, cancel := context.WithCancel(t.Context())
@@ -274,7 +274,7 @@ func TestKeyReload_NoActiveKey(t *testing.T) {
func notActive(t *testing.T) [fieldparams.BLSPubkeyLength]byte {
var r [fieldparams.BLSPubkeyLength]byte
copy(r[:], testutil.ActiveKey[:])
for i := 0; i < len(r); i++ {
for i := range len(r) {
r[i] = bits.Reverse8(r[i])
}
require.DeepNotEqual(t, r, testutil.ActiveKey)

View File

@@ -52,8 +52,8 @@ type FakeValidator struct {
PubkeyToIndexMap map[[fieldparams.BLSPubkeyLength]byte]uint64
IndexToPubkeyMap map[uint64][fieldparams.BLSPubkeyLength]byte
WaitForChainStartCalled int
AttSubmitted chan interface{}
BlockProposed chan interface{}
AttSubmitted chan any
BlockProposed chan any
AccountsChannel chan [][fieldparams.BLSPubkeyLength]byte
GenesisT time.Time
ReceiveBlocksCalled int

View File

@@ -63,7 +63,7 @@ var unknownIndex = primitives.ValidatorIndex(^uint64(0))
func genMockKeymanager(t *testing.T, numKeys int) *mockKeymanager {
pairs := make([]keypair, numKeys)
for i := 0; i < numKeys; i++ {
for i := range numKeys {
pairs[i] = randKeypair(t)
}
@@ -859,7 +859,7 @@ type doppelGangerRequestMatcher struct {
var _ gomock.Matcher = (*doppelGangerRequestMatcher)(nil)
func (m *doppelGangerRequestMatcher) Matches(x interface{}) bool {
func (m *doppelGangerRequestMatcher) Matches(x any) bool {
r, ok := x.(*ethpb.DoppelGangerRequest)
if !ok {
panic("Invalid match type")
@@ -1011,7 +1011,7 @@ func TestValidator_CheckDoppelGanger(t *testing.T) {
attLimit := 5
for i, k := range keys {
pkey := k
for j := 0; j < attLimit; j++ {
for j := range attLimit {
att := createAttestation(10+primitives.Epoch(j), 12+primitives.Epoch(j))
rt, err := att.Data.HashTreeRoot()
assert.NoError(t, err)
@@ -1362,7 +1362,7 @@ type PrepareBeaconProposerRequestMatcher struct {
expectedRecipients []*ethpb.PrepareBeaconProposerRequest_FeeRecipientContainer
}
func (m *PrepareBeaconProposerRequestMatcher) Matches(x interface{}) bool {
func (m *PrepareBeaconProposerRequestMatcher) Matches(x any) bool {
req, ok := x.(*ethpb.PrepareBeaconProposerRequest)
if !ok {
return false

View File

@@ -140,7 +140,7 @@ func TestWaitForActivation_AccountsChanged(t *testing.T) {
&ethpb.MultipleValidatorStatusRequest{
PublicKeys: [][]byte{inactive.pub[:]},
},
).Return(inactiveResp, nil).Do(func(arg0, arg1 interface{}) {
).Return(inactiveResp, nil).Do(func(arg0, arg1 any) {
require.NoError(t, km.add(active))
km.SimulateAccountChanges([][fieldparams.BLSPubkeyLength]byte{inactive.pub, active.pub})
}),
@@ -215,7 +215,7 @@ func TestWaitForActivation_AccountsChanged(t *testing.T) {
&ethpb.MultipleValidatorStatusRequest{
PublicKeys: [][]byte{inactivePubKey[:]},
},
).Return(inactiveResp, nil).Do(func(arg0, arg1 interface{}) {
).Return(inactiveResp, nil).Do(func(arg0, arg1 any) {
err = km.RecoverAccountsFromMnemonic(ctx, constant.TestMnemonic, derived.DefaultMnemonicLanguage, "", 2)
require.NoError(t, err)
pks, err := km.FetchValidatingPublicKeys(ctx)

View File

@@ -488,8 +488,7 @@ func BenchmarkStore_SaveAttestationForPubKey(b *testing.B) {
validatorDB, err := NewStore(b.TempDir(), &Config{PubKeys: pubkeys})
require.NoError(b, err)
b.ResetTimer()
for i := 0; i < b.N; i++ {
for b.Loop() {
b.StopTimer()
err := validatorDB.ClearDB()
require.NoError(b, err)

View File

@@ -80,7 +80,7 @@ func TestStore_ImportInterchangeData_BadFormat_PreventsDBWrites(t *testing.T) {
// verify nothing was saved to the DB. If there is an error in the import process, we need to make
// sure writing is an atomic operation: either the import succeeds and saves the slashing protection
// data to our DB, or it does not.
for i := 0; i < len(publicKeys); i++ {
for i := range publicKeys {
receivedHistory, err := s.ProposalHistoryForPubKey(ctx, publicKeys[i])
require.NoError(t, err)
require.DeepEqual(
@@ -122,7 +122,7 @@ func TestStore_ImportInterchangeData_OK(t *testing.T) {
// Next, we attempt to retrieve the attesting and proposals histories from our database and
// verify those indeed match the originally generated mock histories.
for i := 0; i < len(publicKeys); i++ {
for i := range publicKeys {
for _, att := range attestingHistory[i] {
indexedAtt := &ethpb.IndexedAttestation{
Data: &ethpb.AttestationData{

View File

@@ -24,7 +24,7 @@ func TestPendingAttestationRecords_Flush(t *testing.T) {
// Add 5 atts
num := 5
for i := 0; i < num; i++ {
for i := range num {
queue.Append(&common.AttestationRecord{
Target: primitives.Epoch(i),
})
@@ -543,8 +543,8 @@ func benchCheckSurroundVote(
} else {
surroundingVote = createAttestation(numEpochs+1, numEpochs+2)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
for b.Loop() {
for _, pubKey := range pubKeys {
slashingKind, err := validatorDB.CheckSlashableAttestation(ctx, pubKey, []byte{}, surroundingVote)
if shouldSurround {
@@ -594,7 +594,7 @@ func BenchmarkStore_SaveAttestationForPubKey(b *testing.B) {
validatorDB, err := NewKVStore(ctx, b.TempDir(), &Config{PubKeys: pubkeys})
require.NoError(b, err)
for i := 0; i < b.N; i++ {
for b.Loop() {
b.StopTimer()
err := validatorDB.ClearDB()
require.NoError(b, err)

View File

@@ -13,7 +13,7 @@ func TestStore_EIPBlacklistedPublicKeys(t *testing.T) {
ctx := t.Context()
numValidators := 100
publicKeys := make([][fieldparams.BLSPubkeyLength]byte, numValidators)
for i := 0; i < numValidators; i++ {
for i := range numValidators {
var key [fieldparams.BLSPubkeyLength]byte
copy(key[:], fmt.Sprintf("%d", i))
publicKeys[i] = key

View File

@@ -72,7 +72,7 @@ func TestStore_ImportInterchangeData_BadFormat_PreventsDBWrites(t *testing.T) {
// verify nothing was saved to the DB. If there is an error in the import process, we need to make
// sure writing is an atomic operation: either the import succeeds and saves the slashing protection
// data to our DB, or it does not.
for i := 0; i < len(publicKeys); i++ {
for i := range publicKeys {
for _, att := range attestingHistory[i] {
indexedAtt := &ethpb.IndexedAttestation{
Data: &ethpb.AttestationData{
@@ -126,7 +126,7 @@ func TestStore_ImportInterchangeData_OK(t *testing.T) {
// Next, we attempt to retrieve the attesting and proposals histories from our database and
// verify those indeed match the originally generated mock histories.
for i := 0; i < len(publicKeys); i++ {
for i := range publicKeys {
for _, att := range attestingHistory[i] {
indexedAtt := &ethpb.IndexedAttestation{
Data: &ethpb.AttestationData{

View File

@@ -81,7 +81,7 @@ func Test_migrateOptimalAttesterProtectionUp(t *testing.T) {
}
// Verify we have (source epoch, target epoch) pairs for epochs 0 to 50 correctly.
for sourceEpoch := uint64(0); sourceEpoch < numEpochs; sourceEpoch++ {
for sourceEpoch := range numEpochs {
sourceEpochBytes := bytesutil.Uint64ToBytesBigEndian(sourceEpoch)
targetEpochBytes := sourceEpochsBucket.Get(sourceEpochBytes)
targetEpoch := bytesutil.BytesToUint64BigEndian(targetEpochBytes)

View File

@@ -18,7 +18,7 @@ func TestStore_migrateSourceTargetEpochsBucketUp(t *testing.T) {
// See: https://github.com/prysmaticlabs/prysm/issues/8509
numKeys := 2*publicKeyMigrationBatchSize + 1
pubKeys := make([][fieldparams.BLSPubkeyLength]byte, numKeys)
for i := 0; i < numKeys; i++ {
for i := range numKeys {
var pk [fieldparams.BLSPubkeyLength]byte
copy(pk[:], fmt.Sprintf("%d", i))
pubKeys[i] = pk
@@ -119,7 +119,7 @@ func TestStore_migrateSourceTargetEpochsBucketDown(t *testing.T) {
// See: https://github.com/prysmaticlabs/prysm/issues/8509
numKeys := 2*publicKeyMigrationBatchSize + 1
pubKeys := make([][fieldparams.BLSPubkeyLength]byte, numKeys)
for i := 0; i < numKeys; i++ {
for i := range numKeys {
var pk [fieldparams.BLSPubkeyLength]byte
copy(pk[:], fmt.Sprintf("%d", i))
pubKeys[i] = pk

View File

@@ -41,7 +41,7 @@ func TestPruneAttestations_NoPruning(t *testing.T) {
func TestPruneAttestations_OK(t *testing.T) {
numKeys := uint64(64)
pks := make([][fieldparams.BLSPubkeyLength]byte, 0, numKeys)
for i := uint64(0); i < numKeys; i++ {
for i := range numKeys {
pks = append(pks, bytesutil.ToBytes48(bytesutil.ToBytes(i, 48)))
}
validatorDB := setupDB(t, pks)
@@ -90,7 +90,7 @@ func TestPruneAttestations_OK(t *testing.T) {
func BenchmarkPruneAttestations(b *testing.B) {
numKeys := uint64(8)
pks := make([][fieldparams.BLSPubkeyLength]byte, 0, numKeys)
for i := uint64(0); i < numKeys; i++ {
for i := range numKeys {
pks = append(pks, bytesutil.ToBytes48(bytesutil.ToBytes(i, 48)))
}
validatorDB := setupDB(b, pks)
@@ -99,8 +99,7 @@ func BenchmarkPruneAttestations(b *testing.B) {
// since genesis to SLASHING_PROTECTION_PRUNING_EPOCHS * 20.
numEpochs := params.BeaconConfig().SlashingProtectionPruningEpochs * 20
b.ResetTimer()
for i := 0; i < b.N; i++ {
for b.Loop() {
b.StopTimer()
for _, pk := range pks {
require.NoError(b, setupAttestationsForEveryEpoch(validatorDB, pk, numEpochs))
@@ -128,7 +127,7 @@ func setupAttestationsForEveryEpoch(validatorDB *Store, pubKey [48]byte, numEpoc
if err != nil {
return err
}
for sourceEpoch := primitives.Epoch(0); sourceEpoch < numEpochs; sourceEpoch++ {
for sourceEpoch := range numEpochs {
targetEpoch := sourceEpoch + 1
targetEpochBytes := bytesutil.EpochToBytesBigEndian(targetEpoch)
sourceEpochBytes := bytesutil.EpochToBytesBigEndian(sourceEpoch)

View File

@@ -66,7 +66,7 @@ func (km *Keymanager) RecoverAccountsFromMnemonic(
}
privKeys := make([][]byte, numAccounts)
pubKeys := make([][]byte, numAccounts)
for i := 0; i < numAccounts; i++ {
for i := range numAccounts {
privKey, err := util.PrivateKeyFromSeedAndPath(
seed, fmt.Sprintf(ValidatingKeyDerivationPathTemplate, i),
)
@@ -156,7 +156,7 @@ func (km *Keymanager) ListKeymanagerAccounts(ctx context.Context, cfg keymanager
} else {
fmt.Printf("Showing %d validator accounts\n", len(accountNames))
}
for i := 0; i < len(accountNames); i++ {
for i := range accountNames {
fmt.Println("")
validatingKeyPath := fmt.Sprintf(ValidatingKeyDerivationPathTemplate, i)

View File

@@ -99,7 +99,7 @@ func TestDerivedKeymanager_FetchValidatingPublicKeys(t *testing.T) {
require.Equal(t, numAccounts, len(publicKeys))
wantedPubKeys := make([][fieldparams.BLSPubkeyLength]byte, numAccounts)
for i := 0; i < numAccounts; i++ {
for i := range numAccounts {
privKey, err := util.PrivateKeyFromSeedAndPath(derivedSeed, fmt.Sprintf(ValidatingKeyDerivationPathTemplate, i))
require.NoError(t, err)
var pubKey [fieldparams.BLSPubkeyLength]byte
@@ -138,7 +138,7 @@ func TestDerivedKeymanager_FetchValidatingPrivateKeys(t *testing.T) {
require.Equal(t, numAccounts, len(privateKeys))
wantedPrivKeys := make([][32]byte, numAccounts)
for i := 0; i < numAccounts; i++ {
for i := range numAccounts {
privKey, err := util.PrivateKeyFromSeedAndPath(derivedSeed, fmt.Sprintf(ValidatingKeyDerivationPathTemplate, i))
require.NoError(t, err)
var privKeyBytes [32]byte

View File

@@ -15,7 +15,7 @@ func TestLocalKeymanager_ExtractKeystores(t *testing.T) {
secretKeysCache = make(map[[fieldparams.BLSPubkeyLength]byte]bls.SecretKey)
dr := &Keymanager{}
validatingKeys := make([]bls.SecretKey, 10)
for i := 0; i < len(validatingKeys); i++ {
for i := range validatingKeys {
secretKey, err := bls.RandKey()
require.NoError(t, err)
validatingKeys[i] = secretKey

View File

@@ -29,7 +29,7 @@ func TestLocalKeymanager_DeleteKeystores(t *testing.T) {
ctx := t.Context()
keystores := make([]*keymanager.Keystore, numAccounts)
passwords := make([]string, numAccounts)
for i := 0; i < numAccounts; i++ {
for i := range numAccounts {
keystores[i] = createRandomKeystore(t, password)
passwords[i] = password
}

View File

@@ -44,7 +44,7 @@ func (km *Keymanager) ImportKeystores(
for i := 0; i < len(storeCopy.PrivateKeys); i++ {
existingPubKeys[string(storeCopy.PublicKeys[i])] = true
}
for i := 0; i < len(keystores); i++ {
for i := range keystores {
var privKeyBytes []byte
var pubKeyBytes []byte
privKeyBytes, pubKeyBytes, _, err = km.attemptDecryptKeystore(decryptor, keystores[i], passwords[i])

View File

@@ -41,7 +41,7 @@ func TestLocalKeymanager_NoDuplicates(t *testing.T) {
numKeys := 50
pubKeys := make([][]byte, numKeys)
privKeys := make([][]byte, numKeys)
for i := 0; i < numKeys; i++ {
for i := range numKeys {
priv, err := bls.RandKey()
require.NoError(t, err)
privKeys[i] = priv.Marshal()
@@ -111,7 +111,7 @@ func TestLocalKeymanager_ImportKeystores(t *testing.T) {
numKeystores := 5
keystores := make([]*keymanager.Keystore, numKeystores)
passwords := make([]string, numKeystores)
for i := 0; i < numKeystores; i++ {
for i := range numKeystores {
keystores[i] = createRandomKeystore(t, password)
passwords[i] = password
}
@@ -131,7 +131,7 @@ func TestLocalKeymanager_ImportKeystores(t *testing.T) {
numKeystores := 5
keystores := make([]*keymanager.Keystore, numKeystores)
passwords := make([]string, numKeystores)
for i := 0; i < numKeystores; i++ {
for i := range numKeystores {
pass := password + strconv.Itoa(i)
keystores[i] = createRandomKeystore(t, pass)
passwords[i] = pass

View File

@@ -71,10 +71,10 @@ func (a *accountStore) Copy() *accountStore {
// AccountsKeystoreRepresentation defines an internal Prysm representation
// of validator accounts, encrypted according to the EIP-2334 standard.
type AccountsKeystoreRepresentation struct {
Crypto map[string]interface{} `json:"crypto"`
ID string `json:"uuid"`
Version uint `json:"version"`
Name string `json:"name"`
Crypto map[string]any `json:"crypto"`
ID string `json:"uuid"`
Version uint `json:"version"`
Name string `json:"name"`
}
// ResetCaches for the keymanager.
@@ -127,7 +127,7 @@ func NewInteropKeymanager(_ context.Context, offset, numValidatorKeys uint64) (*
}
lock.Lock()
pubKeys := make([][fieldparams.BLSPubkeyLength]byte, numValidatorKeys)
for i := uint64(0); i < numValidatorKeys; i++ {
for i := range numValidatorKeys {
publicKey := bytesutil.ToBytes48(publicKeys[i].Marshal())
pubKeys[i] = publicKey
secretKeysCache[publicKey] = secretKeys[i]
@@ -374,7 +374,7 @@ func updateAccountsStoreKeys(store *accountStore, privateKeys, publicKeys [][]by
}
// We append to the accounts store keys only
// if the private/secret key do not already exist, to prevent duplicates.
for i := 0; i < len(privateKeys); i++ {
for i := range privateKeys {
sk := privateKeys[i]
pk := publicKeys[i]
_, privKeyExists := existingPrivKeys[string(sk)]
@@ -414,7 +414,7 @@ func (km *Keymanager) ListKeymanagerAccounts(ctx context.Context, cfg keymanager
return errors.Wrap(err, "could not fetch private keys")
}
}
for i := 0; i < len(accountNames); i++ {
for i := range accountNames {
fmt.Println("")
fmt.Printf("%s | %s\n", au.BrightBlue(fmt.Sprintf("Account %d", i)).Bold(), au.BrightGreen(accountNames[i]).Bold())
fmt.Printf("%s %#x\n", au.BrightMagenta("[validating public key]").Bold(), pubKeys[i])
@@ -429,12 +429,12 @@ func (km *Keymanager) ListKeymanagerAccounts(ctx context.Context, cfg keymanager
}
func CreatePrintoutOfKeys(keys [][]byte) string {
var keysStr string
var keysStr strings.Builder
for i, k := range keys {
if i != 0 {
keysStr += "," // Add a comma before each key except the first one
keysStr.WriteString(",") // Add a comma before each key except the first one
}
keysStr += fmt.Sprintf("%#x", bytesutil.Trunc(k))
keysStr.WriteString(fmt.Sprintf("%#x", bytesutil.Trunc(k)))
}
return keysStr
return keysStr.String()
}

View File

@@ -29,7 +29,7 @@ func TestLocalKeymanager_FetchValidatingPublicKeys(t *testing.T) {
ctx := t.Context()
numAccounts := 10
wantedPubKeys := make([][fieldparams.BLSPubkeyLength]byte, 0)
for i := 0; i < numAccounts; i++ {
for range numAccounts {
privKey, err := bls.RandKey()
require.NoError(t, err)
pubKey := bytesutil.ToBytes48(privKey.PublicKey().Marshal())
@@ -61,7 +61,7 @@ func TestLocalKeymanager_FetchValidatingPrivateKeys(t *testing.T) {
ctx := t.Context()
numAccounts := 10
wantedPrivateKeys := make([][32]byte, numAccounts)
for i := 0; i < numAccounts; i++ {
for i := range numAccounts {
privKey, err := bls.RandKey()
require.NoError(t, err)
privKeyData := privKey.Marshal()
@@ -97,7 +97,7 @@ func TestLocalKeymanager_Sign(t *testing.T) {
numAccounts := 10
keystores := make([]*keymanager.Keystore, numAccounts)
passwords := make([]string, numAccounts)
for i := 0; i < numAccounts; i++ {
for i := range numAccounts {
keystores[i] = createRandomKeystore(t, password)
passwords[i] = password
}

View File

@@ -53,13 +53,13 @@ func (km *Keymanager) listenForAccountChanges(ctx context.Context) {
}
ctx, cancel := context.WithCancel(ctx)
defer cancel()
fileChangesChan := make(chan interface{}, 100)
fileChangesChan := make(chan any, 100)
defer close(fileChangesChan)
// We debounce events sent over the file changes channel by an interval
// to ensure we are not overwhelmed by a ton of events fired over the channel in
// a short span of time.
go async.Debounce(ctx, debounceFileChangesInterval, fileChangesChan, func(event interface{}) {
go async.Debounce(ctx, debounceFileChangesInterval, fileChangesChan, func(event any) {
ev, ok := event.(fsnotify.Event)
if !ok {
log.Errorf("Type %T is not a valid file system event", event)

View File

@@ -60,7 +60,7 @@ func TestLocalKeymanager_reloadAccountsFromKeystore(t *testing.T) {
numAccounts := 20
privKeys := make([][]byte, numAccounts)
pubKeys := make([][]byte, numAccounts)
for i := 0; i < numAccounts; i++ {
for i := range numAccounts {
privKey, err := bls.RandKey()
require.NoError(t, err)
privKeys[i] = privKey.Marshal()

View File

@@ -189,7 +189,7 @@ func (client *ApiClient) doRequest(ctx context.Context, httpMethod, fullPath str
}
// unmarshalResponse is a utility method for unmarshalling responses.
func unmarshalResponse(responseBody io.ReadCloser, unmarshalledResponseObject interface{}) error {
func unmarshalResponse(responseBody io.ReadCloser, unmarshalledResponseObject any) error {
defer closeBody(responseBody)
if err := json.NewDecoder(responseBody).Decode(&unmarshalledResponseObject); err != nil {
body, err := io.ReadAll(responseBody)

View File

@@ -756,7 +756,7 @@ func (km *Keymanager) ListKeymanagerAccounts(ctx context.Context, cfg keymanager
// DisplayRemotePublicKeys prints remote public keys to stdout.
func DisplayRemotePublicKeys(validatingPubKeys [][48]byte) {
au := aurora.NewAurora(true)
for i := 0; i < len(validatingPubKeys); i++ {
for i := range validatingPubKeys {
fmt.Println("")
fmt.Printf(
"%s\n", au.BrightGreen(petnames.DeterministicName(validatingPubKeys[i][:], "-")).Bold(),

View File

@@ -100,13 +100,13 @@ type AccountLister interface {
// Keystore json file representation as a Go struct.
type Keystore struct {
Crypto map[string]interface{} `json:"crypto"`
ID string `json:"uuid"`
Pubkey string `json:"pubkey"`
Version uint `json:"version"`
Description string `json:"description"`
Name string `json:"name,omitempty"` // field deprecated in favor of description, EIP2335
Path string `json:"path"`
Crypto map[string]any `json:"crypto"`
ID string `json:"uuid"`
Pubkey string `json:"pubkey"`
Version uint `json:"version"`
Description string `json:"description"`
Name string `json:"name,omitempty"` // field deprecated in favor of description, EIP2335
Path string `json:"path"`
}
// Kind defines an enum for either local, derived, or remote-signing

View File

@@ -641,8 +641,8 @@ func clearDB(ctx context.Context, dataDir string, force bool, isDatabaseMinimal
func parseBeaconApiHeaders(rawHeaders string) map[string][]string {
result := make(map[string][]string)
pairs := strings.Split(rawHeaders, ",")
for _, pair := range pairs {
pairs := strings.SplitSeq(rawHeaders, ",")
for pair := range pairs {
key, value, found := strings.Cut(pair, "=")
if !found {
// Skip malformed pairs

View File

@@ -42,7 +42,7 @@ func TestServer_AuthenticateUsingExistingToken(t *testing.T) {
unaryInfo := &grpc.UnaryServerInfo{
FullMethod: "Proto.CreateWallet",
}
unaryHandler := func(ctx context.Context, req interface{}) (interface{}, error) {
unaryHandler := func(ctx context.Context, req any) (any, error) {
return nil, nil
}
ctxMD := map[string][]string{

View File

@@ -74,7 +74,7 @@ func TestServer_CreateWallet_Local(t *testing.T) {
encryptor := keystorev4.New()
keystores := make([]string, 3)
passwords := make([]string, 3)
for i := 0; i < len(keystores); i++ {
for i := range keystores {
privKey, err := bls.RandKey()
require.NoError(t, err)
pubKey := fmt.Sprintf("%x", privKey.PublicKey().Marshal())

View File

@@ -76,7 +76,7 @@ func (s *Server) ListAccounts(w http.ResponseWriter, r *http.Request) {
return
}
accs := make([]*Account, len(keys))
for i := 0; i < len(keys); i++ {
for i := range keys {
accs[i] = &Account{
ValidatingPublicKey: hexutil.Encode(keys[i][:]),
AccountName: petnames.DeterministicName(keys[i][:], "-"),

View File

@@ -38,7 +38,7 @@ func (m *MockBeaconNodeHealthClient) Recv() (*pb.LogsResponse, error) {
return log, nil
}
func (m *MockBeaconNodeHealthClient) SendMsg(_ interface{}) error {
func (m *MockBeaconNodeHealthClient) SendMsg(_ any) error {
return m.err
}

View File

@@ -63,7 +63,7 @@ func (s *Server) ListKeystores(w http.ResponseWriter, r *http.Request) {
return
}
keystoreResponse := make([]*Keystore, len(pubKeys))
for i := 0; i < len(pubKeys); i++ {
for i := range pubKeys {
keystoreResponse[i] = &Keystore{
ValidatingPubkey: hexutil.Encode(pubKeys[i][:]),
}
@@ -276,7 +276,7 @@ func (s *Server) transformDeletedKeysStatuses(
return nil, errors.Wrap(err, "could not get public keys from DB")
}
if len(pubKeysInDB) > 0 {
for i := 0; i < len(pubKeys); i++ {
for i := range pubKeys {
keyExistsInDB := pubKeysInDB[bytesutil.ToBytes48(pubKeys[i])]
if keyExistsInDB && statuses[i].Status == keymanager.StatusNotFound {
statuses[i].Status = keymanager.StatusNotActive
@@ -419,7 +419,7 @@ func (s *Server) ListRemoteKeys(w http.ResponseWriter, r *http.Request) {
return
}
keystoreResponse := make([]*RemoteKey, len(pubKeys))
for i := 0; i < len(pubKeys); i++ {
for i := range pubKeys {
keystoreResponse[i] = &RemoteKey{
Pubkey: hexutil.Encode(pubKeys[i][:]),
Url: s.validatorService.RemoteSignerConfig().BaseEndpoint,

View File

@@ -109,7 +109,7 @@ func TestServer_ListKeystores(t *testing.T) {
resp := &ListKeystoresResponse{}
require.NoError(t, json.Unmarshal(wr.Body.Bytes(), resp))
require.Equal(t, numAccounts, len(resp.Data))
for i := 0; i < numAccounts; i++ {
for i := range numAccounts {
require.DeepEqual(t, hexutil.Encode(expectedKeys[i][:]), resp.Data[i].ValidatingPubkey)
require.Equal(
t,
@@ -243,7 +243,7 @@ func TestServer_ImportKeystores(t *testing.T) {
password := "12345678"
encodedKeystores := make([]string, numKeystores)
passwords := make([]string, numKeystores)
for i := 0; i < numKeystores; i++ {
for i := range numKeystores {
enc, err := json.Marshal(createRandomKeystore(t, password))
encodedKeystores[i] = string(enc)
require.NoError(t, err)
@@ -280,7 +280,7 @@ func TestServer_ImportKeystores(t *testing.T) {
keystores := make([]*keymanager.Keystore, numKeystores)
passwords := make([]string, numKeystores)
publicKeys := make([][fieldparams.BLSPubkeyLength]byte, numKeystores)
for i := 0; i < numKeystores; i++ {
for i := range numKeystores {
keystores[i] = createRandomKeystore(t, password)
pubKey, err := hexutil.Decode("0x" + keystores[i].Pubkey)
require.NoError(t, err)
@@ -307,7 +307,7 @@ func TestServer_ImportKeystores(t *testing.T) {
require.NoError(t, validatorDB.Close())
}()
encodedKeystores := make([]string, numKeystores)
for i := 0; i < numKeystores; i++ {
for i := range numKeystores {
enc, err := json.Marshal(keystores[i])
require.NoError(t, err)
encodedKeystores[i] = string(enc)
@@ -316,7 +316,7 @@ func TestServer_ImportKeystores(t *testing.T) {
// Generate mock slashing history.
attestingHistory := make([][]*dbCommon.AttestationRecord, 0)
proposalHistory := make([]dbCommon.ProposalHistoryForPubkey, len(publicKeys))
for i := 0; i < len(publicKeys); i++ {
for i := range publicKeys {
proposalHistory[i].Proposals = make([]dbCommon.Proposal, 0)
}
mockJSON, err := mocks.MockSlashingProtectionJSON(publicKeys, attestingHistory, proposalHistory)
@@ -439,7 +439,7 @@ func TestServer_DeleteKeystores(t *testing.T) {
// Generate mock slashing history.
attestingHistory := make([][]*dbCommon.AttestationRecord, 0)
proposalHistory := make([]dbCommon.ProposalHistoryForPubkey, len(publicKeys))
for i := 0; i < len(publicKeys); i++ {
for i := range publicKeys {
proposalHistory[i].Proposals = make([]dbCommon.Proposal, 0)
}
mockJSON, err := mocks.MockSlashingProtectionJSON(publicKeys, attestingHistory, proposalHistory)

View File

@@ -93,7 +93,7 @@ func TestImportSlashingProtection_Preconditions(t *testing.T) {
// Generate mock slashing history.
attestingHistory := make([][]*common.AttestationRecord, 0)
proposalHistory := make([]common.ProposalHistoryForPubkey, len(pubKeys))
for i := 0; i < len(pubKeys); i++ {
for i := range pubKeys {
proposalHistory[i].Proposals = make([]common.Proposal, 0)
}
mockJSON, err := mocks.MockSlashingProtectionJSON(pubKeys, attestingHistory, proposalHistory)
@@ -198,7 +198,7 @@ func TestImportExportSlashingProtection_RoundTrip(t *testing.T) {
// Generate mock slashing history.
attestingHistory := make([][]*common.AttestationRecord, 0)
proposalHistory := make([]common.ProposalHistoryForPubkey, len(pubKeys))
for i := 0; i < len(pubKeys); i++ {
for i := range pubKeys {
proposalHistory[i].Proposals = make([]common.Proposal, 0)
}
mockJSON, err := mocks.MockSlashingProtectionJSON(pubKeys, attestingHistory, proposalHistory)

View File

@@ -18,10 +18,10 @@ import (
func (s *Server) AuthTokenInterceptor() grpc.UnaryServerInterceptor {
return func(
ctx context.Context,
req interface{},
req any,
info *grpc.UnaryServerInfo,
handler grpc.UnaryHandler,
) (interface{}, error) {
) (any, error) {
if err := s.authorize(ctx); err != nil {
return nil, err
}

View File

@@ -24,7 +24,7 @@ func TestServer_AuthTokenInterceptor_Verify(t *testing.T) {
unaryInfo := &grpc.UnaryServerInfo{
FullMethod: "Proto.CreateWallet",
}
unaryHandler := func(ctx context.Context, req interface{}) (interface{}, error) {
unaryHandler := func(ctx context.Context, req any) (any, error) {
return nil, nil
}
ctxMD := map[string][]string{
@@ -45,7 +45,7 @@ func TestServer_AuthTokenInterceptor_BadToken(t *testing.T) {
unaryInfo := &grpc.UnaryServerInfo{
FullMethod: "Proto.CreateWallet",
}
unaryHandler := func(ctx context.Context, req interface{}) (interface{}, error) {
unaryHandler := func(ctx context.Context, req any) (any, error) {
return nil, nil
}

View File

@@ -139,7 +139,7 @@ func signedAttestationsByPubKey(ctx context.Context, validatorDB db.Database, pu
return nil, nil
}
signedAttestations := make([]*format.SignedAttestation, 0)
for i := 0; i < len(history); i++ {
for i := range history {
att := history[i]
// Special edge case due to a bug in Prysm's old slashing protection schema. The bug
// manifests itself as the first entry in attester slashing protection history

View File

@@ -47,7 +47,7 @@ func TestImportExport_RoundTrip(t *testing.T) {
require.NoError(t, err)
rawPublicKeys := make([][]byte, numValidators)
for i := 0; i < numValidators; i++ {
for i := range numValidators {
rawPublicKeys[i] = publicKeys[i][:]
}
@@ -133,7 +133,7 @@ func TestImportExport_RoundTrip_SkippedAttestationEpochs(t *testing.T) {
require.NoError(t, err)
rawPublicKeys := make([][]byte, numValidators)
for i := 0; i < numValidators; i++ {
for i := range numValidators {
rawPublicKeys[i] = pubKeys[i][:]
}
@@ -184,7 +184,7 @@ func TestImportExport_FilterKeys(t *testing.T) {
// Next up, we export our slashing protection database into the EIP standard file.
// Next, we attempt to import it into our validator database.
rawKeys := make([][]byte, 5)
for i := 0; i < len(rawKeys); i++ {
for i := range rawKeys {
rawKeys[i] = publicKeys[i][:]
}
@@ -228,7 +228,7 @@ func TestImportInterchangeData_OK(t *testing.T) {
// Next, we attempt to retrieve the attesting and proposals histories from our database and
// verify those indeed match the originally generated mock histories.
for i := 0; i < len(publicKeys); i++ {
for i := range publicKeys {
receivedAttestingHistory, err := validatorDB.AttestationHistoryForPubKey(ctx, publicKeys[i])
require.NoError(t, err)
@@ -399,7 +399,7 @@ func TestStore_ImportInterchangeData_BadFormat_PreventsDBWrites(t *testing.T) {
// verify nothing was saved to the DB. If there is an error in the import process, we need to make
// sure writing is an atomic operation: either the import succeeds and saves the slashing protection
// data to our DB, or it does not.
for i := 0; i < len(publicKeys); i++ {
for i := range publicKeys {
receivedAttestingHistory, err := validatorDB.AttestationHistoryForPubKey(ctx, publicKeys[i])
require.NoError(t, err)
require.Equal(

View File

@@ -23,7 +23,7 @@ func MockSlashingProtectionJSON(
standardProtectionFormat := &format.EIPSlashingProtectionFormat{}
standardProtectionFormat.Metadata.GenesisValidatorsRoot = fmt.Sprintf("%#x", bytesutil.PadTo([]byte{32}, 32))
standardProtectionFormat.Metadata.InterchangeFormatVersion = format.InterchangeFormatVersion
for i := 0; i < len(publicKeys); i++ {
for i := range publicKeys {
data := &format.ProtectionData{
Pubkey: fmt.Sprintf("%#x", publicKeys[i]),
}
@@ -58,7 +58,7 @@ func MockAttestingAndProposalHistories(pubkeys [][fieldparams.BLSPubkeyLength]by
attData := make([][]*common.AttestationRecord, numValidators)
proposalData := make([]common.ProposalHistoryForPubkey, numValidators)
gen := rand.NewGenerator()
for v := 0; v < numValidators; v++ {
for v := range numValidators {
latestTarget := primitives.Epoch(gen.Intn(int(params.BeaconConfig().WeakSubjectivityPeriod) / 1000))
// If 0, we change the value to 1 as we compute source by doing (target-1)
// to prevent any underflows in this setup helper.
@@ -96,7 +96,7 @@ func MockAttestingAndProposalHistories(pubkeys [][fieldparams.BLSPubkeyLength]by
// CreateRandomPubKeys --
func CreateRandomPubKeys(numValidators int) ([][fieldparams.BLSPubkeyLength]byte, error) {
pubKeys := make([][fieldparams.BLSPubkeyLength]byte, numValidators)
for i := 0; i < numValidators; i++ {
for i := range numValidators {
randKey, err := bls.RandKey()
if err != nil {
return nil, err
@@ -109,7 +109,7 @@ func CreateRandomPubKeys(numValidators int) ([][fieldparams.BLSPubkeyLength]byte
// CreateMockRoots --
func CreateMockRoots(numRoots int) [][32]byte {
roots := make([][32]byte, numRoots)
for i := 0; i < numRoots; i++ {
for i := range numRoots {
var rt [32]byte
copy(rt[:], fmt.Sprintf("%d", i))
}